,
pub(crate) install_mirrors: PythonInstallMirrors,
pub(crate) refresh: Refresh,
pub(crate) settings: ResolverInstallerSettings,
@@ -1190,6 +1191,7 @@ impl SyncSettings {
package,
script,
python,
+ python_platform,
check,
no_check,
} = args;
@@ -1249,6 +1251,7 @@ impl SyncSettings {
all_packages,
package,
python: python.and_then(Maybe::into_option),
+ python_platform,
refresh: Refresh::from(refresh),
settings,
install_mirrors,
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 690079abf..d4479296a 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -10026,3 +10026,42 @@ fn read_only() -> Result<()> {
Ok(())
}
+
+#[test]
+fn sync_python_platform() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["black"]
+ "#,
+ )?;
+
+ // Lock the project
+ context.lock().assert().success();
+
+ // Sync with a specific platform should filter packages
+ uv_snapshot!(context.filters(), context.sync().arg("--python-platform").arg("linux"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 8 packages in [TIME]
+ Prepared 6 packages in [TIME]
+ Installed 6 packages in [TIME]
+ + black==24.3.0
+ + click==8.1.7
+ + mypy-extensions==1.0.0
+ + packaging==24.0
+ + pathspec==0.12.1
+ + platformdirs==4.2.0
+ ");
+
+ Ok(())
+}
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index f20bcf7ec..989cbc54b 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -1138,7 +1138,51 @@ used.
synced to the given environment. The interpreter will be used to create a virtual
environment in the project.
See uv python for details on Python discovery and supported request formats.
-May also be set with the UV_PYTHON environment variable.
--quiet, -q Use quiet output.
+May also be set with the UV_PYTHON environment variable.
--python-platform python-platform The platform for which requirements should be installed.
+Represented as a "target triple", a string that describes the target platform in terms of its CPU, vendor, and operating system name, like x86_64-unknown-linux-gnu or aarch64-apple-darwin.
+When targeting macOS (Darwin), the default minimum version is 12.0. Use MACOSX_DEPLOYMENT_TARGET to specify a different minimum version, e.g., 13.0.
+WARNING: When specified, uv will select wheels that are compatible with the target platform; as a result, the installed distributions may not be compatible with the current platform. Conversely, any distributions that are built from source may be incompatible with the target platform, as they will be built for the current platform. The --python-platform option is intended for advanced use cases.
+Possible values:
+
+windows: An alias for x86_64-pc-windows-msvc, the default target for Windows
+linux: An alias for x86_64-unknown-linux-gnu, the default target for Linux
+macos: An alias for aarch64-apple-darwin, the default target for macOS
+x86_64-pc-windows-msvc: A 64-bit x86 Windows target
+i686-pc-windows-msvc: A 32-bit x86 Windows target
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
+x86_64-apple-darwin: An x86 macOS target
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-musl: An ARM64 Linux target
+x86_64-unknown-linux-musl: An x86_64 Linux target
+x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
+x86_64-manylinux_2_17: An x86_64 target for the manylinux_2_17 platform
+x86_64-manylinux_2_28: An x86_64 target for the manylinux_2_28 platform
+x86_64-manylinux_2_31: An x86_64 target for the manylinux_2_31 platform
+x86_64-manylinux_2_32: An x86_64 target for the manylinux_2_32 platform
+x86_64-manylinux_2_33: An x86_64 target for the manylinux_2_33 platform
+x86_64-manylinux_2_34: An x86_64 target for the manylinux_2_34 platform
+x86_64-manylinux_2_35: An x86_64 target for the manylinux_2_35 platform
+x86_64-manylinux_2_36: An x86_64 target for the manylinux_2_36 platform
+x86_64-manylinux_2_37: An x86_64 target for the manylinux_2_37 platform
+x86_64-manylinux_2_38: An x86_64 target for the manylinux_2_38 platform
+x86_64-manylinux_2_39: An x86_64 target for the manylinux_2_39 platform
+x86_64-manylinux_2_40: An x86_64 target for the manylinux_2_40 platform
+aarch64-manylinux2014: An ARM64 target for the manylinux2014 platform. Equivalent to aarch64-manylinux_2_17
+aarch64-manylinux_2_17: An ARM64 target for the manylinux_2_17 platform
+aarch64-manylinux_2_28: An ARM64 target for the manylinux_2_28 platform
+aarch64-manylinux_2_31: An ARM64 target for the manylinux_2_31 platform
+aarch64-manylinux_2_32: An ARM64 target for the manylinux_2_32 platform
+aarch64-manylinux_2_33: An ARM64 target for the manylinux_2_33 platform
+aarch64-manylinux_2_34: An ARM64 target for the manylinux_2_34 platform
+aarch64-manylinux_2_35: An ARM64 target for the manylinux_2_35 platform
+aarch64-manylinux_2_36: An ARM64 target for the manylinux_2_36 platform
+aarch64-manylinux_2_37: An ARM64 target for the manylinux_2_37 platform
+aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
+aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
+aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
+wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+ --quiet, -q Use quiet output.
Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.
--refresh Refresh all cached data
--refresh-package refresh-package Refresh cached data for a specific package
From cc5d5d5fba546afca77788ced6ae2f2488b90f17 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 10 Jul 2025 21:41:32 -0400
Subject: [PATCH 006/130] Fix repeated word in Pyodide doc (#14554)
---
crates/uv-configuration/src/target_triple.rs | 2 +-
docs/reference/cli.md | 8 ++++----
uv.schema.json | 2 +-
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/crates/uv-configuration/src/target_triple.rs b/crates/uv-configuration/src/target_triple.rs
index b9ca3fafe..81499deff 100644
--- a/crates/uv-configuration/src/target_triple.rs
+++ b/crates/uv-configuration/src/target_triple.rs
@@ -227,7 +227,7 @@ pub enum TargetTriple {
#[serde(alias = "aarch64-manylinux240")]
Aarch64Manylinux240,
- /// A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12.
+ /// A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.
#[cfg_attr(feature = "clap", value(name = "wasm32-pyodide2024"))]
Wasm32Pyodide2024,
}
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 989cbc54b..f8247d5aa 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -1763,7 +1763,7 @@ interpreter. Use --universal to display the tree for all platforms,
aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
-wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
--python-version python-version The Python version to use when filtering the tree.
For example, pass --python-version 3.10 to display the dependencies that would be included when installing on Python 3.10.
Defaults to the version of the discovered Python interpreter.
@@ -3448,7 +3448,7 @@ by --python-version.
aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
-wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
--python-version python-version The Python version to use for resolution.
For example, 3.8 or 3.8.17.
Defaults to the version of the Python interpreter used for resolution.
@@ -3705,7 +3705,7 @@ be used with caution, as it can modify the system Python installation.
aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
-wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
--python-version python-version The minimum Python version that should be supported by the requirements (e.g., 3.7 or 3.7.9).
If a patch version is omitted, the minimum patch version is assumed. For example, 3.7 is mapped to 3.7.0.
--quiet, -q Use quiet output.
@@ -3987,7 +3987,7 @@ should be used with caution, as it can modify the system Python installation.
aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
-wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
--python-version python-version The minimum Python version that should be supported by the requirements (e.g., 3.7 or 3.7.9).
If a patch version is omitted, the minimum patch version is assumed. For example, 3.7 is mapped to 3.7.0.
--quiet, -q Use quiet output.
diff --git a/uv.schema.json b/uv.schema.json
index aba25a46e..4190672e9 100644
--- a/uv.schema.json
+++ b/uv.schema.json
@@ -2242,7 +2242,7 @@
"const": "aarch64-manylinux_2_40"
},
{
- "description": "A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12.",
+ "description": "A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.",
"type": "string",
"const": "wasm32-pyodide2024"
}
From 2e0f399eeb57e06dadf6e4d652d31329ada64e85 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 10 Jul 2025 21:46:44 -0400
Subject: [PATCH 007/130] Run `cargo dev generate-all` (#14555)
## Summary
I think we had a missing rebase.
---
docs/reference/cli.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index f8247d5aa..0364703c2 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -1181,7 +1181,7 @@ environment in the project.
aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
-wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
+wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
--quiet, -q Use quiet output.
Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.
--refresh Refresh all cached data
From 71470b7b1ae41867846822716b11d66f23ecdabd Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Fri, 11 Jul 2025 07:35:27 -0500
Subject: [PATCH 008/130] Add `UV_HTTP_RETRIES` to customize retry counts
(#14544)
I want to increase this number in CI and was surprised we didn't support
configuration yet.
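For illustration, a minimal standalone sketch of the env-var handling this patch
adds (the real builder method `retries_from_env` is in the diff below; this
simplified, hypothetical helper only shows the parse-or-default behavior, with the
variable name and the default of 3 taken from the docs added here):

```rust
use std::env;

/// Simplified sketch: read a `UV_HTTP_RETRIES`-style variable, fall back to a
/// default when unset, and error on values that are not a valid `u32`.
fn retries_from_env(var: &str, default: u32) -> Result<u32, String> {
    match env::var(var) {
        Ok(value) => value
            .parse::<u32>()
            .map_err(|err| format!("Failed to parse `{var}`: {err}")),
        Err(env::VarError::NotPresent) => Ok(default),
        Err(err) => Err(format!("Failed to read `{var}`: {err}")),
    }
}

fn main() {
    // Unset => Ok(3); `UV_HTTP_RETRIES=5` (e.g. in CI) => Ok(5); `UV_HTTP_RETRIES=foo` => Err(..).
    println!("{:?}", retries_from_env("UV_HTTP_RETRIES", 3));
}
```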
---
crates/uv-client/src/base_client.rs | 26 +++++++++-
crates/uv-client/src/registry_client.rs | 5 ++
crates/uv-requirements/src/lib.rs | 3 ++
crates/uv-static/src/env_vars.rs | 6 +++
crates/uv/src/commands/build_frontend.rs | 1 +
crates/uv/src/commands/pip/compile.rs | 1 +
crates/uv/src/commands/pip/install.rs | 1 +
crates/uv/src/commands/pip/list.rs | 1 +
crates/uv/src/commands/pip/sync.rs | 1 +
crates/uv/src/commands/pip/tree.rs | 1 +
crates/uv/src/commands/pip/uninstall.rs | 1 +
crates/uv/src/commands/project/add.rs | 2 +
crates/uv/src/commands/project/init.rs | 2 +
crates/uv/src/commands/project/lock.rs | 2 +
crates/uv/src/commands/project/mod.rs | 7 +++
crates/uv/src/commands/project/run.rs | 4 ++
crates/uv/src/commands/project/sync.rs | 1 +
crates/uv/src/commands/project/tree.rs | 1 +
crates/uv/src/commands/publish.rs | 1 +
crates/uv/src/commands/python/install.rs | 1 +
crates/uv/src/commands/python/pin.rs | 1 +
crates/uv/src/commands/tool/install.rs | 2 +
crates/uv/src/commands/tool/run.rs | 1 +
crates/uv/src/commands/tool/upgrade.rs | 1 +
crates/uv/src/commands/venv.rs | 3 ++
crates/uv/tests/it/pip_install.rs | 60 ++++++++++++++++++++++++
docs/reference/environment.md | 8 ++++
27 files changed, 143 insertions(+), 1 deletion(-)
diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs
index e11845adb..9ddc30e75 100644
--- a/crates/uv-client/src/base_client.rs
+++ b/crates/uv-client/src/base_client.rs
@@ -6,6 +6,7 @@ use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};
+use anyhow::Context;
use anyhow::anyhow;
use http::{
HeaderMap, HeaderName, HeaderValue, Method, StatusCode,
@@ -166,6 +167,25 @@ impl<'a> BaseClientBuilder<'a> {
self
}
+ /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change.
+ ///
+ /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+ pub fn retries_from_env(self) -> anyhow::Result<Self> {
+ // TODO(zanieb): We should probably parse this in another layer, but there's not a natural
+ // fit for it right now
+ if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
+ Ok(self.retries(
+ value
+ .to_string_lossy()
+ .as_ref()
+ .parse::<u32>()
+ .context("Failed to parse `UV_HTTP_RETRIES`")?,
+ ))
+ } else {
+ Ok(self)
+ }
+ }
+
#[must_use]
pub fn native_tls(mut self, native_tls: bool) -> Self {
self.native_tls = native_tls;
@@ -238,7 +258,11 @@ impl<'a> BaseClientBuilder<'a> {
/// Create a [`RetryPolicy`] for the client.
fn retry_policy(&self) -> ExponentialBackoff {
- ExponentialBackoff::builder().build_with_max_retries(self.retries)
+ let mut builder = ExponentialBackoff::builder();
+ if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
+ builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
+ }
+ builder.build_with_max_retries(self.retries)
}
pub fn build(&self) -> BaseClient {
diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs
index c7694676c..afa1b03ae 100644
--- a/crates/uv-client/src/registry_client.rs
+++ b/crates/uv-client/src/registry_client.rs
@@ -115,6 +115,11 @@ impl<'a> RegistryClientBuilder<'a> {
self
}
+ pub fn retries_from_env(mut self) -> anyhow::Result<Self> {
+ self.base_client_builder = self.base_client_builder.retries_from_env()?;
+ Ok(self)
+ }
+
#[must_use]
pub fn native_tls(mut self, native_tls: bool) -> Self {
self.base_client_builder = self.base_client_builder.native_tls(native_tls);
diff --git a/crates/uv-requirements/src/lib.rs b/crates/uv-requirements/src/lib.rs
index 812f9141f..68fe84abc 100644
--- a/crates/uv-requirements/src/lib.rs
+++ b/crates/uv-requirements/src/lib.rs
@@ -31,6 +31,9 @@ pub enum Error {
#[error(transparent)]
WheelFilename(#[from] uv_distribution_filename::WheelFilenameError),
+ #[error("Failed to construct HTTP client")]
+ ClientError(#[source] anyhow::Error),
+
#[error(transparent)]
Io(#[from] std::io::Error),
}
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index 4ac2976d9..5b91fccea 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -402,6 +402,9 @@ impl EnvVars {
/// Timeout (in seconds) for HTTP requests. (default: 30 s)
pub const UV_HTTP_TIMEOUT: &'static str = "UV_HTTP_TIMEOUT";
+ /// The number of retries for HTTP requests. (default: 3)
+ pub const UV_HTTP_RETRIES: &'static str = "UV_HTTP_RETRIES";
+
/// Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`.
pub const UV_REQUEST_TIMEOUT: &'static str = "UV_REQUEST_TIMEOUT";
@@ -659,6 +662,9 @@ impl EnvVars {
#[attr_hidden]
pub const UV_TEST_VENDOR_LINKS_URL: &'static str = "UV_TEST_VENDOR_LINKS_URL";
+ /// Used to disable delay for HTTP retries in tests.
+ pub const UV_TEST_NO_HTTP_RETRY_DELAY: &'static str = "UV_TEST_NO_HTTP_RETRY_DELAY";
+
/// Used to set an index url for tests.
#[attr_hidden]
pub const UV_TEST_INDEX_URL: &'static str = "UV_TEST_INDEX_URL";
diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs
index 2cef9a406..fd6ed73d7 100644
--- a/crates/uv/src/commands/build_frontend.rs
+++ b/crates/uv/src/commands/build_frontend.rs
@@ -207,6 +207,7 @@ async fn build_impl(
} = settings;
let client_builder = BaseClientBuilder::default()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs
index a1846d418..c40716763 100644
--- a/crates/uv/src/commands/pip/compile.rs
+++ b/crates/uv/src/commands/pip/compile.rs
@@ -179,6 +179,7 @@ pub(crate) async fn pip_compile(
}
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs
index aa6e6a6c9..bbfe99c50 100644
--- a/crates/uv/src/commands/pip/install.rs
+++ b/crates/uv/src/commands/pip/install.rs
@@ -99,6 +99,7 @@ pub(crate) async fn pip_install(
let start = std::time::Instant::now();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/pip/list.rs b/crates/uv/src/commands/pip/list.rs
index 356574436..40e8c770d 100644
--- a/crates/uv/src/commands/pip/list.rs
+++ b/crates/uv/src/commands/pip/list.rs
@@ -87,6 +87,7 @@ pub(crate) async fn pip_list(
let capabilities = IndexCapabilities::default();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs
index 8f26aaea2..6858ddad0 100644
--- a/crates/uv/src/commands/pip/sync.rs
+++ b/crates/uv/src/commands/pip/sync.rs
@@ -81,6 +81,7 @@ pub(crate) async fn pip_sync(
preview: PreviewMode,
) -> Result<ExitStatus> {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/pip/tree.rs b/crates/uv/src/commands/pip/tree.rs
index b0ba44c35..81a566b8e 100644
--- a/crates/uv/src/commands/pip/tree.rs
+++ b/crates/uv/src/commands/pip/tree.rs
@@ -86,6 +86,7 @@ pub(crate) async fn pip_tree(
let capabilities = IndexCapabilities::default();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/pip/uninstall.rs b/crates/uv/src/commands/pip/uninstall.rs
index 835e7de65..f617a0203 100644
--- a/crates/uv/src/commands/pip/uninstall.rs
+++ b/crates/uv/src/commands/pip/uninstall.rs
@@ -42,6 +42,7 @@ pub(crate) async fn pip_uninstall(
let start = std::time::Instant::now();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs
index f255194de..d65866483 100644
--- a/crates/uv/src/commands/project/add.rs
+++ b/crates/uv/src/commands/project/add.rs
@@ -176,6 +176,7 @@ pub(crate) async fn add(
}
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -329,6 +330,7 @@ pub(crate) async fn add(
.ok();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(settings.resolver.keyring_provider)
diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs
index 15fed409e..9ff321a72 100644
--- a/crates/uv/src/commands/project/init.rs
+++ b/crates/uv/src/commands/project/init.rs
@@ -218,6 +218,7 @@ async fn init_script(
warn_user_once!("`--package` is a no-op for Python scripts, which are standalone");
}
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -348,6 +349,7 @@ async fn init_project(
let reporter = PythonDownloadReporter::single(printer);
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs
index 9cbd43ea9..f79557d9e 100644
--- a/crates/uv/src/commands/project/lock.rs
+++ b/crates/uv/src/commands/project/lock.rs
@@ -99,6 +99,7 @@ pub(crate) async fn lock(
let script = match script {
Some(ScriptPath::Path(path)) => {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -588,6 +589,7 @@ async fn do_lock(
// Initialize the client.
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(*keyring_provider)
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index a012e2855..1a0274cac 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -690,6 +690,7 @@ impl ScriptInterpreter {
}
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -946,6 +947,7 @@ impl ProjectInterpreter {
}
let client_builder = BaseClientBuilder::default()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -1656,6 +1658,8 @@ pub(crate) async fn resolve_names(
} = settings;
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()
+ .map_err(uv_requirements::Error::ClientError)?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(*keyring_provider)
@@ -1813,6 +1817,7 @@ pub(crate) async fn resolve_environment(
} = spec.requirements;
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(*keyring_provider)
@@ -1984,6 +1989,7 @@ pub(crate) async fn sync_environment(
} = settings;
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
@@ -2147,6 +2153,7 @@ pub(crate) async fn update_environment(
} = settings;
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(*keyring_provider)
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs
index f0a46f16a..3eece5432 100644
--- a/crates/uv/src/commands/project/run.rs
+++ b/crates/uv/src/commands/project/run.rs
@@ -618,6 +618,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
// If we're isolating the environment, use an ephemeral virtual environment as the
// base environment for the project.
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -859,6 +860,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
let interpreter = {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -929,6 +931,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
None
} else {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -1526,6 +1529,7 @@ impl RunCommand {
.tempfile()?;
let client = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone())
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 664eb2a94..a9a161527 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -623,6 +623,7 @@ pub(super) async fn do_sync(
} = settings;
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.keyring(keyring_provider)
diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs
index d401940d9..cd1339d3e 100644
--- a/crates/uv/src/commands/project/tree.rs
+++ b/crates/uv/src/commands/project/tree.rs
@@ -215,6 +215,7 @@ pub(crate) async fn tree(
let client = RegistryClientBuilder::new(
cache.clone().with_refresh(Refresh::All(Timestamp::now())),
)
+ .retries_from_env()?
.native_tls(network_settings.native_tls)
.connectivity(network_settings.connectivity)
.allow_insecure_host(network_settings.allow_insecure_host.clone())
diff --git a/crates/uv/src/commands/publish.rs b/crates/uv/src/commands/publish.rs
index 63a0f2756..e7f5e00a2 100644
--- a/crates/uv/src/commands/publish.rs
+++ b/crates/uv/src/commands/publish.rs
@@ -95,6 +95,7 @@ pub(crate) async fn publish(
false,
);
let registry_client_builder = RegistryClientBuilder::new(cache.clone())
+ .retries_from_env()?
.native_tls(network_settings.native_tls)
.connectivity(network_settings.connectivity)
.allow_insecure_host(network_settings.allow_insecure_host.clone())
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index 3df0cf91d..8c8387d07 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -376,6 +376,7 @@ pub(crate) async fn install(
// Download and unpack the Python versions concurrently
let client = uv_client::BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone())
diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs
index 395981751..f4d10cdfa 100644
--- a/crates/uv/src/commands/python/pin.rs
+++ b/crates/uv/src/commands/python/pin.rs
@@ -107,6 +107,7 @@ pub(crate) async fn pin(
}
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs
index 5ced211b3..27f18abe4 100644
--- a/crates/uv/src/commands/tool/install.rs
+++ b/crates/uv/src/commands/tool/install.rs
@@ -66,6 +66,7 @@ pub(crate) async fn install(
preview: PreviewMode,
) -> Result<ExitStatus> {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
@@ -97,6 +98,7 @@ pub(crate) async fn install(
let workspace_cache = WorkspaceCache::default();
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs
index 2746d65ad..c8297243d 100644
--- a/crates/uv/src/commands/tool/run.rs
+++ b/crates/uv/src/commands/tool/run.rs
@@ -690,6 +690,7 @@ async fn get_or_create_environment(
preview: PreviewMode,
) -> Result<(ToolRequirement, PythonEnvironment), ProjectError> {
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/tool/upgrade.rs b/crates/uv/src/commands/tool/upgrade.rs
index 95b7d1e2d..9d2d32a21 100644
--- a/crates/uv/src/commands/tool/upgrade.rs
+++ b/crates/uv/src/commands/tool/upgrade.rs
@@ -80,6 +80,7 @@ pub(crate) async fn upgrade(
let reporter = PythonDownloadReporter::single(printer);
let client_builder = BaseClientBuilder::new()
+ .retries_from_env()?
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
.allow_insecure_host(network_settings.allow_insecure_host.clone());
diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs
index 9334d844d..6d6e15758 100644
--- a/crates/uv/src/commands/venv.rs
+++ b/crates/uv/src/commands/venv.rs
@@ -193,6 +193,9 @@ async fn venv_impl(
.unwrap_or(PathBuf::from(".venv")),
);
+ // TODO(zanieb): We don't use [`BaseClientBuilder::retries_from_env`] here because it's a pain
+ // to map into a miette diagnostic. We should just remove miette diagnostics here, we're not
+ // using them elsewhere.
let client_builder = BaseClientBuilder::default()
.connectivity(network_settings.connectivity)
.native_tls(network_settings.native_tls)
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index f231198e4..f142beefa 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -499,6 +499,66 @@ fn install_package() {
context.assert_command("import flask").success();
}
+#[tokio::test]
+async fn install_http_retries() {
+ let context = TestContext::new("3.12");
+
+ let server = MockServer::start().await;
+
+ // Create a server that always fails, so we can see the number of retries used
+ Mock::given(method("GET"))
+ .respond_with(ResponseTemplate::new(503))
+ .mount(&server)
+ .await;
+
+ uv_snapshot!(context.filters(), context.pip_install()
+ .arg("anyio")
+ .arg("--index")
+ .arg(server.uri())
+ .env(EnvVars::UV_HTTP_RETRIES, "foo"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: Failed to parse `UV_HTTP_RETRIES`
+ Caused by: invalid digit found in string
+ "
+ );
+
+ uv_snapshot!(context.filters(), context.pip_install()
+ .arg("anyio")
+ .arg("--index")
+ .arg(server.uri())
+ .env(EnvVars::UV_HTTP_RETRIES, "999999999999"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: Failed to parse `UV_HTTP_RETRIES`
+ Caused by: number too large to fit in target type
+ "
+ );
+
+ uv_snapshot!(context.filters(), context.pip_install()
+ .arg("anyio")
+ .arg("--index")
+ .arg(server.uri())
+ .env(EnvVars::UV_HTTP_RETRIES, "5")
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: Request failed after 5 retries
+ Caused by: Failed to fetch: `http://[LOCALHOST]/anyio/`
+ Caused by: HTTP status server error (503 Service Unavailable) for url (http://[LOCALHOST]/anyio/)
+ "
+ );
+}
+
/// Install a package from a `requirements.txt` into a virtual environment.
#[test]
fn install_requirements_txt() -> Result<()> {
diff --git a/docs/reference/environment.md b/docs/reference/environment.md
index 61889ddb3..bf8bf29ec 100644
--- a/docs/reference/environment.md
+++ b/docs/reference/environment.md
@@ -102,6 +102,10 @@ Equivalent to the `--token` argument for self update. A GitHub token for authent
Enables fetching files stored in Git LFS when installing a package from a Git repository.
+### `UV_HTTP_RETRIES`
+
+The number of retries for HTTP requests. (default: 3)
+
### `UV_HTTP_TIMEOUT`
Timeout (in seconds) for HTTP requests. (default: 30 s)
@@ -416,6 +420,10 @@ WARNING: `UV_SYSTEM_PYTHON=true` is intended for use in continuous integration (
or containerized environments and should be used with caution, as modifying the system
Python can lead to unexpected behavior.
+### `UV_TEST_NO_HTTP_RETRY_DELAY`
+
+Used to disable delay for HTTP retries in tests.
+
### `UV_TOOL_BIN_DIR`
Specifies the "bin" directory for installing tool executables.
From 567468ce72a3c7c1644cb061f9ed0d396118b2fb Mon Sep 17 00:00:00 2001
From: Ivan Smirnov
Date: Fri, 11 Jul 2025 16:01:54 +0100
Subject: [PATCH 009/130] More efficient cache-key globbing + support parent
paths in globs (#13469)
## Summary
(Related PR: #13438 - would be nice to have it merged as well since it
touches on the same globwalker code)
There's a few issues with `cache-key` globs, which this PR attempts to
address:
- Currently, parent or absolute paths are not allowed,
which is not obvious and is not documented. E.g., cache-key paths of the
form `{file = "../dep/**"}` will be essentially ignored.
- Absolute glob patterns also don't work (funnily enough, there's logic
in `globwalk` itself that attempts to address it in
[`globwalk::glob_builder()`](https://github.com/Gilnaa/globwalk/blob/8973fa2bc560be54c91448131238fa50d56ee121/src/lib.rs#L415),
which serves as inspiration to some parts of this PR).
- The reason for parent paths being ignored is the way globwalker is
currently being triggered in `uv-cache-info`: the base directory is
being walked over completely and each entry is then being matched to one
of the provided match patterns.
- This may also end up being very inefficient if you have a huge root
folder with thousands of files: if your match patterns are `a/b/*.rs`
and `a/c/*.py` then instead of walking over the root directory, you can
just walk over `a/b` and `a/c` and match the relevant patterns there.
- Why supporting parent paths may be important to the point of being a
blocker: in large codebases with python projects depending on other
local non-python projects (e.g. rust crates), cache-keys can be very
useful to track dependency on the source code of the latter (e.g.
`cache-keys = [{ file = "../../crates/some-dep/**" }]`.
- TLDR: parent/absolute cache-key globs don't work, glob walk can be
slow.
## Solution
- In this PR, user-provided glob patterns are first clustered
(LCP-style) into pattern groups with longest common path prefix; each of
these groups can then be walked over separately (see the sketch after this
list).
- Pattern groups do not overlap, so we would never walk over the same
directory twice (unless there's symlinks pointing to same folders).
- Paths are not canonicalized nor virtually normalized (which is
impossible on Unix without FS access), so the method is symlink-safe
(i.e. we don't treat `a/b/..` as `a`) and should work fine with #13438.
- Because of LCP logic, the minimal amount of directory space will be
traversed to cover all patterns.
- Absolute glob patterns will now work.
- Parent-relative glob patterns will now work.
- Glob walking will be more efficient in some cases.
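As a rough illustration of the clustering idea (a deliberately simplified,
hypothetical sketch: it buckets each pattern by its literal non-glob base,
whereas the trie-based `cluster_globs` added in this PR additionally merges
nested bases via their longest common prefix and handles `..`/absolute roots):

```rust
use std::collections::BTreeMap;
use std::path::{Component, Path, PathBuf};

/// Simplified sketch: split each pattern at its first glob-like component and
/// bucket the glob remainders by the literal base path that precedes them.
fn cluster(patterns: &[&str]) -> BTreeMap<PathBuf, Vec<String>> {
    let mut groups: BTreeMap<PathBuf, Vec<String>> = BTreeMap::new();
    for &pattern in patterns {
        let (mut base, mut rest, mut globbing) = (PathBuf::new(), PathBuf::new(), false);
        for part in Path::new(pattern).components() {
            globbing |= matches!(
                part,
                Component::Normal(name)
                    if name.to_str().is_some_and(|name| name.contains(|c: char| "*?[]{}".contains(c)))
            );
            if globbing {
                rest.push(part);
            } else {
                base.push(part);
            }
        }
        groups.entry(base).or_default().push(rest.to_string_lossy().into_owned());
    }
    groups
}

fn main() {
    // `a/b/*.rs` and `a/c/*.py` get separate walk roots under `a`, while
    // `../dep/**` is walked from the parent directory rather than being ignored.
    for (base, patterns) in cluster(&["a/b/*.rs", "a/c/*.py", "../dep/**"]) {
        println!("{} -> {patterns:?}", base.display());
    }
}
```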
## Possible improvements
- Efficiency can be further greatly improved if we limit max depth for
globwalk. Currently, a simple ".toml" will deep-traverse the whole
folder. Essentially, max depth can be always set to either N or
infinity. If a pattern at a pivot node contains `**`, we collect all
children nodes from the subtree into the same group and don't limit max
depth; otherwise, we set max depth to the length of the glob pattern.
This wouldn't change correctness though and can we done separately if
needed.
- If this is considered important enough, docs can be updated to
indicate that parent and absolute globs are supported (and symlinks are
resolved, if the relevant PR is also merged in).
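A tiny sketch of the depth bound described in the first item above (a proposal
only; nothing like this is implemented in this PR):

```rust
use std::path::Path;

/// Proposed (hypothetical) depth bound for a glob walk: unlimited when the
/// pattern contains `**`, otherwise the number of components in the pattern.
fn max_depth(pattern: &str) -> Option<usize> {
    if pattern.contains("**") {
        None // `**` can match at any depth
    } else {
        Some(Path::new(pattern).components().count())
    }
}

fn main() {
    assert_eq!(max_depth("*.toml"), Some(1));
    assert_eq!(max_depth("a/b/*.rs"), Some(3));
    assert_eq!(max_depth("src/**/*.py"), None);
}
```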
## Test Plan
- Glob splitting and clustering tests are included in the PR.
- Relative and absolute glob cache-keys were tested in an actual
codebase.
---
crates/uv-cache-info/src/cache_info.rs | 50 ++--
crates/uv-cache-info/src/glob.rs | 318 +++++++++++++++++++++++++
crates/uv-cache-info/src/lib.rs | 1 +
3 files changed, 347 insertions(+), 22 deletions(-)
create mode 100644 crates/uv-cache-info/src/glob.rs
diff --git a/crates/uv-cache-info/src/cache_info.rs b/crates/uv-cache-info/src/cache_info.rs
index ce98cc513..27a98ab54 100644
--- a/crates/uv-cache-info/src/cache_info.rs
+++ b/crates/uv-cache-info/src/cache_info.rs
@@ -7,6 +7,7 @@ use serde::Deserialize;
use tracing::{debug, warn};
use crate::git_info::{Commit, Tags};
+use crate::glob::cluster_globs;
use crate::timestamp::Timestamp;
#[derive(Debug, thiserror::Error)]
@@ -212,34 +213,39 @@ impl CacheInfo {
}
}
- // If we have any globs, process them in a single pass.
+ // If we have any globs, first cluster them using LCP and then do a single pass on each group.
if !globs.is_empty() {
- let walker = globwalk::GlobWalkerBuilder::from_patterns(directory, &globs)
+ for (glob_base, glob_patterns) in cluster_globs(&globs) {
+ let walker = globwalk::GlobWalkerBuilder::from_patterns(
+ directory.join(glob_base),
+ &glob_patterns,
+ )
.file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
.build()?;
- for entry in walker {
- let entry = match entry {
- Ok(entry) => entry,
- Err(err) => {
- warn!("Failed to read glob entry: {err}");
+ for entry in walker {
+ let entry = match entry {
+ Ok(entry) => entry,
+ Err(err) => {
+ warn!("Failed to read glob entry: {err}");
+ continue;
+ }
+ };
+ let metadata = match entry.metadata() {
+ Ok(metadata) => metadata,
+ Err(err) => {
+ warn!("Failed to read metadata for glob entry: {err}");
+ continue;
+ }
+ };
+ if !metadata.is_file() {
+ warn!(
+ "Expected file for cache key, but found directory: `{}`",
+ entry.path().display()
+ );
continue;
}
- };
- let metadata = match entry.metadata() {
- Ok(metadata) => metadata,
- Err(err) => {
- warn!("Failed to read metadata for glob entry: {err}");
- continue;
- }
- };
- if !metadata.is_file() {
- warn!(
- "Expected file for cache key, but found directory: `{}`",
- entry.path().display()
- );
- continue;
+ timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
}
- timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
}
}
diff --git a/crates/uv-cache-info/src/glob.rs b/crates/uv-cache-info/src/glob.rs
new file mode 100644
index 000000000..e9c85897f
--- /dev/null
+++ b/crates/uv-cache-info/src/glob.rs
@@ -0,0 +1,318 @@
+use std::{
+ collections::BTreeMap,
+ path::{Component, Components, Path, PathBuf},
+};
+
+/// Check if a component of the path looks like it may be a glob pattern.
+///
+/// Note: this function is being used when splitting a glob pattern into the longest possible
+/// base and the glob remainder (scanning through components until we hit the first component
+/// for which this function returns true). It is acceptable for this function to return
+/// false positives (e.g. patterns like 'foo[bar' or 'foo{bar') in which case correctness
+/// will not be affected but efficiency might be (because we'll traverse more than we should),
+/// however it should not return false negatives.
+fn is_glob_like(part: Component) -> bool {
+ matches!(part, Component::Normal(_))
+ && part.as_os_str().to_str().is_some_and(|part| {
+ ["*", "{", "}", "?", "[", "]"]
+ .into_iter()
+ .any(|c| part.contains(c))
+ })
+}
+
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+struct GlobParts {
+ base: PathBuf,
+ pattern: PathBuf,
+}
+
+/// Split a glob into longest possible base + shortest possible glob pattern.
+fn split_glob(pattern: impl AsRef<str>) -> GlobParts {
+ let pattern: &Path = pattern.as_ref().as_ref();
+
+ let mut glob = GlobParts::default();
+ let mut globbing = false;
+ let mut last = None;
+
+ for part in pattern.components() {
+ if let Some(last) = last {
+ if last != Component::CurDir {
+ if globbing {
+ glob.pattern.push(last);
+ } else {
+ glob.base.push(last);
+ }
+ }
+ }
+ if !globbing {
+ globbing = is_glob_like(part);
+ }
+ // we don't know if this part is the last one, defer handling it by one iteration
+ last = Some(part);
+ }
+
+ if let Some(last) = last {
+ // defer handling the last component to prevent draining entire pattern into base
+ if globbing || matches!(last, Component::Normal(_)) {
+ glob.pattern.push(last);
+ } else {
+ glob.base.push(last);
+ }
+ }
+ glob
+}
+
+/// Classic trie with edges being path components and values being glob patterns.
+#[derive(Default)]
+struct Trie<'a> {
+ children: BTreeMap<Component<'a>, Trie<'a>>,
+ patterns: Vec<&'a Path>,
+}
+
+impl<'a> Trie<'a> {
+ fn insert(&mut self, mut components: Components<'a>, pattern: &'a Path) {
+ if let Some(part) = components.next() {
+ self.children
+ .entry(part)
+ .or_default()
+ .insert(components, pattern);
+ } else {
+ self.patterns.push(pattern);
+ }
+ }
+
+ #[allow(clippy::needless_pass_by_value)]
+ fn collect_patterns(
+ &self,
+ pattern_prefix: PathBuf,
+ group_prefix: PathBuf,
+ patterns: &mut Vec<PathBuf>,
+ groups: &mut Vec<(PathBuf, Vec<PathBuf>)>,
+ ) {
+ // collect all patterns beneath and including this node
+ for pattern in &self.patterns {
+ patterns.push(pattern_prefix.join(pattern));
+ }
+ for (part, child) in &self.children {
+ if let Component::Normal(_) = part {
+ // for normal components, collect all descendant patterns ('normal' edges only)
+ child.collect_patterns(
+ pattern_prefix.join(part),
+ group_prefix.join(part),
+ patterns,
+ groups,
+ );
+ } else {
+ // for non-normal component edges, kick off separate group collection at this node
+ child.collect_groups(group_prefix.join(part), groups);
+ }
+ }
+ }
+
+ #[allow(clippy::needless_pass_by_value)]
+ fn collect_groups(&self, prefix: PathBuf, groups: &mut Vec<(PathBuf, Vec<PathBuf>)>) {
+ // LCP-style grouping of patterns
+ if self.patterns.is_empty() {
+ // no patterns in this node; child nodes can form independent groups
+ for (part, child) in &self.children {
+ child.collect_groups(prefix.join(part), groups);
+ }
+ } else {
+ // pivot point, we've hit a pattern node; we have to stop here and form a group
+ let mut group = Vec::new();
+ self.collect_patterns(PathBuf::new(), prefix.clone(), &mut group, groups);
+ groups.push((prefix, group));
+ }
+ }
+}
+
+/// Given a collection of globs, cluster them into (base, globs) groups so that:
+/// - base doesn't contain any glob symbols
+/// - each directory would only be walked at most once
+/// - base of each group is the longest common prefix of globs in the group
+pub(crate) fn cluster_globs(patterns: &[impl AsRef<str>]) -> Vec<(PathBuf, Vec<String>)> {
+ // split all globs into base/pattern
+ let globs: Vec<_> = patterns.iter().map(split_glob).collect();
+
+ // construct a path trie out of all split globs
+ let mut trie = Trie::default();
+ for glob in &globs {
+ trie.insert(glob.base.components(), &glob.pattern);
+ }
+
+ // run LCP-style aggregation of patterns in the trie into groups
+ let mut groups = Vec::new();
+ trie.collect_groups(PathBuf::new(), &mut groups);
+
+ // finally, convert resulting patterns to strings
+ groups
+ .into_iter()
+ .map(|(base, patterns)| {
+ (
+ base,
+ patterns
+ .iter()
+ // NOTE: this unwrap is ok because input patterns are valid utf-8
+ .map(|p| p.to_str().unwrap().to_owned())
+ .collect(),
+ )
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{GlobParts, cluster_globs, split_glob};
+
+ fn windowsify(path: &str) -> String {
+ if cfg!(windows) {
+ path.replace('/', "\\")
+ } else {
+ path.to_owned()
+ }
+ }
+
+ #[test]
+ fn test_split_glob() {
+ #[track_caller]
+ fn check(input: &str, base: &str, pattern: &str) {
+ let result = split_glob(input);
+ let expected = GlobParts {
+ base: base.into(),
+ pattern: pattern.into(),
+ };
+ assert_eq!(result, expected, "{input:?} != {base:?} + {pattern:?}");
+ }
+
+ check("", "", "");
+ check("a", "", "a");
+ check("a/b", "a", "b");
+ check("a/b/", "a", "b");
+ check("a/.//b/", "a", "b");
+ check("./a/b/c", "a/b", "c");
+ check("c/d/*", "c/d", "*");
+ check("c/d/*/../*", "c/d", "*/../*");
+ check("a/?b/c", "a", "?b/c");
+ check("/a/b/*", "/a/b", "*");
+ check("../x/*", "../x", "*");
+ check("a/{b,c}/d", "a", "{b,c}/d");
+ check("a/[bc]/d", "a", "[bc]/d");
+ check("*", "", "*");
+ check("*/*", "", "*/*");
+ check("..", "..", "");
+ check("/", "/", "");
+ }
+
+ #[test]
+ fn test_cluster_globs() {
+ #[track_caller]
+ fn check(input: &[&str], expected: &[(&str, &[&str])]) {
+ let input = input.iter().map(|s| windowsify(s)).collect::<Vec<_>>();
+
+ let mut result_sorted = cluster_globs(&input);
+ for (_, patterns) in &mut result_sorted {
+ patterns.sort_unstable();
+ }
+ result_sorted.sort_unstable();
+
+ let mut expected_sorted = Vec::new();
+ for (base, patterns) in expected {
+ let mut patterns_sorted = Vec::new();
+ for pattern in *patterns {
+ patterns_sorted.push(windowsify(pattern));
+ }
+ patterns_sorted.sort_unstable();
+ expected_sorted.push((windowsify(base).into(), patterns_sorted));
+ }
+ expected_sorted.sort_unstable();
+
+ assert_eq!(
+ result_sorted, expected_sorted,
+ "{input:?} != {expected_sorted:?} (got: {result_sorted:?})"
+ );
+ }
+
+ check(&["a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
+ check(&["./a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
+ check(&["/a/b/*", "/a/c/*"], &[("/a/b", &["*"]), ("/a/c", &["*"])]);
+ check(
+ &["../a/b/*", "../a/c/*"],
+ &[("../a/b", &["*"]), ("../a/c", &["*"])],
+ );
+ check(&["x/*", "y/*"], &[("x", &["*"]), ("y", &["*"])]);
+ check(&[], &[]);
+ check(
+ &["./*", "a/*", "../foo/*.png"],
+ &[("", &["*", "a/*"]), ("../foo", &["*.png"])],
+ );
+ check(
+ &[
+ "?",
+ "/foo/?",
+ "/foo/bar/*",
+ "../bar/*.png",
+ "../bar/../baz/*.jpg",
+ ],
+ &[
+ ("", &["?"]),
+ ("/foo", &["?", "bar/*"]),
+ ("../bar", &["*.png"]),
+ ("../bar/../baz", &["*.jpg"]),
+ ],
+ );
+ check(&["/abs/path/*"], &[("/abs/path", &["*"])]);
+ check(&["/abs/*", "rel/*"], &[("/abs", &["*"]), ("rel", &["*"])]);
+ check(&["a/{b,c}/*", "a/d?/*"], &[("a", &["{b,c}/*", "d?/*"])]);
+ check(
+ &[
+ "../shared/a/[abc].png",
+ "../shared/a/b/*",
+ "../shared/b/c/?x/d",
+ "docs/important/*.{doc,xls}",
+ "docs/important/very/*",
+ ],
+ &[
+ ("../shared/a", &["[abc].png", "b/*"]),
+ ("../shared/b/c", &["?x/d"]),
+ ("docs/important", &["*.{doc,xls}", "very/*"]),
+ ],
+ );
+ check(&["file.txt"], &[("", &["file.txt"])]);
+ check(&["/"], &[("/", &[""])]);
+ check(&[".."], &[("..", &[""])]);
+ check(
+ &["file1.txt", "file2.txt"],
+ &[("", &["file1.txt", "file2.txt"])],
+ );
+ check(
+ &["a/file1.txt", "a/file2.txt"],
+ &[("a", &["file1.txt", "file2.txt"])],
+ );
+ check(
+ &["*", "a/b/*", "a/../c/*.jpg", "a/../c/*.png", "/a/*", "/b/*"],
+ &[
+ ("", &["*", "a/b/*"]),
+ ("a/../c", &["*.jpg", "*.png"]),
+ ("/a", &["*"]),
+ ("/b", &["*"]),
+ ],
+ );
+
+ if cfg!(windows) {
+ check(
+ &[
+ r"\\foo\bar\shared/a/[abc].png",
+ r"\\foo\bar\shared/a/b/*",
+ r"\\foo\bar/shared/b/c/?x/d",
+ r"D:\docs\important/*.{doc,xls}",
+ r"D:\docs/important/very/*",
+ ],
+ &[
+ (r"\\foo\bar\shared\a", &["[abc].png", r"b\*"]),
+ (r"\\foo\bar\shared\b\c", &[r"?x\d"]),
+ (r"D:\docs\important", &["*.{doc,xls}", r"very\*"]),
+ ],
+ );
+ }
+ }
+}
diff --git a/crates/uv-cache-info/src/lib.rs b/crates/uv-cache-info/src/lib.rs
index 286411f68..092d40652 100644
--- a/crates/uv-cache-info/src/lib.rs
+++ b/crates/uv-cache-info/src/lib.rs
@@ -3,4 +3,5 @@ pub use crate::timestamp::*;
mod cache_info;
mod git_info;
+mod glob;
mod timestamp;
From 088a436efe34ec517d5762545a753ab9424278d8 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Fri, 11 Jul 2025 11:45:45 -0500
Subject: [PATCH 010/130] Move `run_to_completion` utility to `crate::child`
instead of `crate::commands::run` (#14566)
This was really confusing as everything else in the `commands` module is
a command
---
crates/uv/src/{commands/run.rs => child.rs} | 0
crates/uv/src/commands/mod.rs | 1 -
crates/uv/src/commands/project/run.rs | 2 +-
crates/uv/src/commands/tool/run.rs | 2 +-
crates/uv/src/lib.rs | 1 +
5 files changed, 3 insertions(+), 3 deletions(-)
rename crates/uv/src/{commands/run.rs => child.rs} (100%)
diff --git a/crates/uv/src/commands/run.rs b/crates/uv/src/child.rs
similarity index 100%
rename from crates/uv/src/commands/run.rs
rename to crates/uv/src/child.rs
diff --git a/crates/uv/src/commands/mod.rs b/crates/uv/src/commands/mod.rs
index 0203d4dd5..d1e647363 100644
--- a/crates/uv/src/commands/mod.rs
+++ b/crates/uv/src/commands/mod.rs
@@ -72,7 +72,6 @@ mod project;
mod publish;
mod python;
pub(crate) mod reporters;
-mod run;
#[cfg(feature = "self-update")]
mod self_update;
mod tool;
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs
index 3eece5432..63850f563 100644
--- a/crates/uv/src/commands/project/run.rs
+++ b/crates/uv/src/commands/project/run.rs
@@ -40,6 +40,7 @@ use uv_static::EnvVars;
use uv_warnings::warn_user;
use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceCache, WorkspaceError};
+use crate::child::run_to_completion;
use crate::commands::pip::loggers::{
DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger,
};
@@ -55,7 +56,6 @@ use crate::commands::project::{
validate_project_requires_python,
};
use crate::commands::reporters::PythonDownloadReporter;
-use crate::commands::run::run_to_completion;
use crate::commands::{ExitStatus, diagnostics, project};
use crate::printer::Printer;
use crate::settings::{NetworkSettings, ResolverInstallerSettings};
diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs
index c8297243d..f6b79774c 100644
--- a/crates/uv/src/commands/tool/run.rs
+++ b/crates/uv/src/commands/tool/run.rs
@@ -42,6 +42,7 @@ use uv_warnings::warn_user;
use uv_warnings::warn_user_once;
use uv_workspace::WorkspaceCache;
+use crate::child::run_to_completion;
use crate::commands::ExitStatus;
use crate::commands::pip::loggers::{
DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger,
@@ -51,7 +52,6 @@ use crate::commands::project::{
EnvironmentSpecification, PlatformState, ProjectError, resolve_names,
};
use crate::commands::reporters::PythonDownloadReporter;
-use crate::commands::run::run_to_completion;
use crate::commands::tool::common::{matching_packages, refine_interpreter};
use crate::commands::tool::{Target, ToolRequest};
use crate::commands::{diagnostics, project::environment::CachedEnvironment};
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 261dd8d7c..84d889599 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -52,6 +52,7 @@ use crate::settings::{
PublishSettings,
};
+pub(crate) mod child;
pub(crate) mod commands;
pub(crate) mod logging;
pub(crate) mod printer;
From a9e21f7f6b26e4ad27718a35efb53d7cda490e69 Mon Sep 17 00:00:00 2001
From: dmitry-bychkov
Date: Fri, 11 Jul 2025 20:05:15 +0300
Subject: [PATCH 011/130] Update CONTRIBUTING.md with instructions to format
markdown files (#14246)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## Summary
The current documentation requires contributors to have Node.js/npm
installed locally to format Markdown files. This might be problematic
for users who don't work with the JavaScript ecosystem or who want to
avoid the toolchain setup.
This change adds a Docker-based alternative:
```
docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
```
This mounts the current working directory at /src/ inside a container and
also sets the working directory (-w) to /src/ so that Prettier loads
.editorconfig.
## Test Plan
Both commands should produce the same output
Native Prettier
```console
➜ uv git:(docs/contributing-md-formatting) npx prettier --prose-wrap always --write "**/*.md"
.github/PULL_REQUEST_TEMPLATE.md 28ms (unchanged)
BENCHMARKS.md 30ms (unchanged)
changelogs/0.1.x.md 264ms (unchanged)
changelogs/0.2.x.md 223ms (unchanged)
changelogs/0.3.x.md 29ms (unchanged)
changelogs/0.4.x.md 126ms (unchanged)
changelogs/0.5.x.md 153ms (unchanged)
changelogs/0.6.x.md 77ms (unchanged)
CONTRIBUTING.md 9ms (unchanged)
crates/README.md 4ms (unchanged)
crates/uv-build/README.md 1ms (unchanged)
crates/uv-client/README.md 1ms (unchanged)
crates/uv-globfilter/README.md 3ms (unchanged)
crates/uv-pep440/Readme.md 6ms (unchanged)
crates/uv-pep508/Readme.md 3ms (unchanged)
crates/uv-python/python/packaging/README.md 1ms (unchanged)
crates/uv-trampoline/README.md 14ms (unchanged)
crates/uv-virtualenv/README.md 1ms (unchanged)
docs/concepts/authentication.md 10ms (unchanged)
docs/concepts/build-backend.md 11ms (unchanged)
docs/concepts/cache.md 17ms (unchanged)
docs/concepts/configuration-files.md 9ms (unchanged)
docs/concepts/index.md 2ms (unchanged)
docs/concepts/indexes.md 22ms (unchanged)
docs/concepts/projects/build.md 4ms (unchanged)
docs/concepts/projects/config.md 25ms (unchanged)
docs/concepts/projects/dependencies.md 29ms (unchanged)
docs/concepts/projects/index.md 2ms (unchanged)
docs/concepts/projects/init.md 10ms (unchanged)
docs/concepts/projects/layout.md 10ms (unchanged)
docs/concepts/projects/run.md 4ms (unchanged)
docs/concepts/projects/sync.md 11ms (unchanged)
docs/concepts/projects/workspaces.md 12ms (unchanged)
docs/concepts/python-versions.md 26ms (unchanged)
docs/concepts/resolution.md 40ms (unchanged)
docs/concepts/tools.md 19ms (unchanged)
docs/getting-started/features.md 8ms (unchanged)
docs/getting-started/first-steps.md 2ms (unchanged)
docs/getting-started/help.md 8ms (unchanged)
docs/getting-started/index.md 2ms (unchanged)
docs/getting-started/installation.md 8ms (unchanged)
docs/guides/index.md 2ms (unchanged)
docs/guides/install-python.md 31ms (unchanged)
docs/guides/integration/alternative-indexes.md 21ms (unchanged)
docs/guides/integration/aws-lambda.md 49ms (unchanged)
docs/guides/integration/dependency-bots.md 16ms (unchanged)
docs/guides/integration/docker.md 37ms (unchanged)
docs/guides/integration/fastapi.md 8ms (unchanged)
docs/guides/integration/github.md 36ms (unchanged)
docs/guides/integration/index.md 4ms (unchanged)
docs/guides/integration/jupyter.md 17ms (unchanged)
docs/guides/integration/marimo.md 11ms (unchanged)
docs/guides/integration/pre-commit.md 27ms (unchanged)
docs/guides/integration/pytorch.md 12ms (unchanged)
docs/guides/package.md 5ms (unchanged)
docs/guides/projects.md 12ms (unchanged)
docs/guides/scripts.md 19ms (unchanged)
docs/guides/tools.md 8ms (unchanged)
docs/index.md 7ms (unchanged)
docs/pip/compatibility.md 44ms (unchanged)
docs/pip/compile.md 13ms (unchanged)
docs/pip/dependencies.md 3ms (unchanged)
docs/pip/environments.md 10ms (unchanged)
docs/pip/index.md 2ms (unchanged)
docs/pip/inspection.md 1ms (unchanged)
docs/pip/packages.md 3ms (unchanged)
docs/reference/benchmarks.md 3ms (unchanged)
docs/reference/index.md 3ms (unchanged)
docs/reference/installer.md 2ms (unchanged)
docs/reference/policies/index.md 2ms (unchanged)
docs/reference/policies/license.md 2ms (unchanged)
docs/reference/policies/platforms.md 4ms (unchanged)
docs/reference/policies/versioning.md 2ms (unchanged)
docs/reference/resolver-internals.md 19ms (unchanged)
docs/reference/troubleshooting/build-failures.md 13ms (unchanged)
docs/reference/troubleshooting/index.md 1ms (unchanged)
docs/reference/troubleshooting/reproducible-examples.md 7ms (unchanged)
PIP_COMPATIBILITY.md 1ms (unchanged)
README.md 10ms (unchanged)
scripts/benchmark/README.md 1ms (unchanged)
scripts/packages/built-by-uv/README.md 1ms (unchanged)
scripts/packages/dependent_locals/first_local/README.md 0ms (unchanged)
scripts/packages/dependent_locals/second_local/README.md 0ms (unchanged)
scripts/packages/hatchling_editable/README.md 0ms (unchanged)
scripts/packages/README.md 1ms (unchanged)
scripts/packages/root_editable/README.md 0ms (unchanged)
scripts/workspaces/albatross-virtual-workspace/packages/Unrelated.md 1ms (unchanged)
SECURITY.md 2ms (unchanged)
STYLE.md 9ms (unchanged)
➜ uv git:(docs/contributing-md-formatting) git status
On branch docs/contributing-md-formatting
nothing to commit, working tree clean
➜ uv git:(docs/contributing-md-formatting)
```
Docker based
```console
➜ uv git:(docs/contributing-md-formatting) sudo docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
npm warn exec The following package was not found and will be installed: prettier@3.6.0
.github/PULL_REQUEST_TEMPLATE.md 54ms (unchanged)
BENCHMARKS.md 41ms (unchanged)
changelogs/0.1.x.md 297ms (unchanged)
changelogs/0.2.x.md 306ms (unchanged)
changelogs/0.3.x.md 50ms (unchanged)
changelogs/0.4.x.md 137ms (unchanged)
changelogs/0.5.x.md 217ms (unchanged)
changelogs/0.6.x.md 114ms (unchanged)
CONTRIBUTING.md 12ms (unchanged)
crates/README.md 8ms (unchanged)
crates/uv-build/README.md 2ms (unchanged)
crates/uv-client/README.md 2ms (unchanged)
crates/uv-globfilter/README.md 6ms (unchanged)
crates/uv-pep440/Readme.md 8ms (unchanged)
crates/uv-pep508/Readme.md 5ms (unchanged)
crates/uv-python/python/packaging/README.md 2ms (unchanged)
crates/uv-trampoline/README.md 17ms (unchanged)
crates/uv-virtualenv/README.md 2ms (unchanged)
docs/concepts/authentication.md 20ms (unchanged)
docs/concepts/build-backend.md 20ms (unchanged)
docs/concepts/cache.md 35ms (unchanged)
docs/concepts/configuration-files.md 11ms (unchanged)
docs/concepts/index.md 3ms (unchanged)
docs/concepts/indexes.md 24ms (unchanged)
docs/concepts/projects/build.md 5ms (unchanged)
docs/concepts/projects/config.md 25ms (unchanged)
docs/concepts/projects/dependencies.md 38ms (unchanged)
docs/concepts/projects/index.md 3ms (unchanged)
docs/concepts/projects/init.md 15ms (unchanged)
docs/concepts/projects/layout.md 11ms (unchanged)
docs/concepts/projects/run.md 7ms (unchanged)
docs/concepts/projects/sync.md 15ms (unchanged)
docs/concepts/projects/workspaces.md 15ms (unchanged)
docs/concepts/python-versions.md 30ms (unchanged)
docs/concepts/resolution.md 52ms (unchanged)
docs/concepts/tools.md 20ms (unchanged)
docs/getting-started/features.md 10ms (unchanged)
docs/getting-started/first-steps.md 2ms (unchanged)
docs/getting-started/help.md 5ms (unchanged)
docs/getting-started/index.md 3ms (unchanged)
docs/getting-started/installation.md 8ms (unchanged)
docs/guides/index.md 2ms (unchanged)
docs/guides/install-python.md 49ms (unchanged)
docs/guides/integration/alternative-indexes.md 29ms (unchanged)
docs/guides/integration/aws-lambda.md 102ms (unchanged)
docs/guides/integration/dependency-bots.md 20ms (unchanged)
docs/guides/integration/docker.md 38ms (unchanged)
docs/guides/integration/fastapi.md 7ms (unchanged)
docs/guides/integration/github.md 46ms (unchanged)
docs/guides/integration/index.md 3ms (unchanged)
docs/guides/integration/jupyter.md 16ms (unchanged)
docs/guides/integration/marimo.md 6ms (unchanged)
docs/guides/integration/pre-commit.md 14ms (unchanged)
docs/guides/integration/pytorch.md 18ms (unchanged)
docs/guides/package.md 9ms (unchanged)
docs/guides/projects.md 11ms (unchanged)
docs/guides/scripts.md 13ms (unchanged)
docs/guides/tools.md 13ms (unchanged)
docs/index.md 11ms (unchanged)
docs/pip/compatibility.md 40ms (unchanged)
docs/pip/compile.md 12ms (unchanged)
docs/pip/dependencies.md 4ms (unchanged)
docs/pip/environments.md 10ms (unchanged)
docs/pip/index.md 4ms (unchanged)
docs/pip/inspection.md 2ms (unchanged)
docs/pip/packages.md 5ms (unchanged)
docs/reference/benchmarks.md 2ms (unchanged)
docs/reference/index.md 3ms (unchanged)
docs/reference/installer.md 3ms (unchanged)
docs/reference/policies/index.md 1ms (unchanged)
docs/reference/policies/license.md 3ms (unchanged)
docs/reference/policies/platforms.md 5ms (unchanged)
docs/reference/policies/versioning.md 4ms (unchanged)
docs/reference/resolver-internals.md 29ms (unchanged)
docs/reference/troubleshooting/build-failures.md 19ms (unchanged)
docs/reference/troubleshooting/index.md 2ms (unchanged)
docs/reference/troubleshooting/reproducible-examples.md 9ms (unchanged)
PIP_COMPATIBILITY.md 1ms (unchanged)
README.md 15ms (unchanged)
scripts/benchmark/README.md 1ms (unchanged)
scripts/packages/built-by-uv/README.md 1ms (unchanged)
scripts/packages/dependent_locals/first_local/README.md 0ms (unchanged)
scripts/packages/dependent_locals/second_local/README.md 0ms (unchanged)
scripts/packages/hatchling_editable/README.md 1ms (unchanged)
scripts/packages/README.md 1ms (unchanged)
scripts/packages/root_editable/README.md 0ms (unchanged)
scripts/workspaces/albatross-virtual-workspace/packages/Unrelated.md 2ms (unchanged)
SECURITY.md 3ms (unchanged)
STYLE.md 16ms (unchanged)
npm notice
npm notice New minor version of npm available! 11.3.0 -> 11.4.2
npm notice Changelog: https://github.com/npm/cli/releases/tag/v11.4.2
npm notice To update run: npm install -g npm@11.4.2
npm notice
➜ uv git:(docs/contributing-md-formatting) git status
On branch docs/contributing-md-formatting
nothing to commit, working tree clean
➜ uv git:(docs/contributing-md-formatting)
```
Co-authored-by: Dmitry Bychkov
---
CONTRIBUTING.md | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 14b5197fe..f7be958a4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -165,6 +165,13 @@ After making changes to the documentation, format the markdown files with:
npx prettier --prose-wrap always --write "**/*.md"
```
+Note that the command above requires Node.js and npm to be installed on your system. As an
+alternative, you can run this command using Docker:
+
+```console
+$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
+```
+
## Releases
Releases can only be performed by Astral team members.
From 081e2010df63c561bdf56f5d6e34b102dd035d94 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Fri, 11 Jul 2025 12:13:35 -0500
Subject: [PATCH 012/130] Isolate `install_git_public_rate_limited...` test
from `UV_HTTP_RETRIES` (#14567)
Blocking https://github.com/astral-sh/uv/pull/14565
This also makes the test 5x faster, from 5s to 1s.
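As an aside, the isolation pattern applied in the hunks below boils down to removing the inherited retry count and disabling the retry backoff on the spawned command. A minimal standalone sketch of that idea (using `std::process::Command` rather than the repo's `TestContext`, with a placeholder package name) looks like this:

```rust
// Sketch only: isolate a spawned command from ambient retry settings.
// `std::process::Command` stands in for the test harness; "example-package"
// is a placeholder, not a real fixture.
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("uv")
        .args(["pip", "install", "example-package"])
        // Drop any retry count inherited from the CI environment.
        .env_remove("UV_HTTP_RETRIES")
        // Skip the backoff sleep between retries so the failure path stays fast.
        .env("UV_TEST_NO_HTTP_RETRY_DELAY", "true")
        .status()?;
    println!("exit status: {status}");
    Ok(())
}
```

The tests themselves express the same thing through `.env_remove(EnvVars::UV_HTTP_RETRIES)` and `.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true")` on the snapshot command builder, as the diff shows.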
---
crates/uv/tests/it/edit.rs | 4 +++-
crates/uv/tests/it/pip_install.rs | 4 +++-
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index c1a74541f..18170cff9 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -561,7 +561,9 @@ async fn add_git_private_rate_limited_by_github_rest_api_429_response() -> Resul
uv_snapshot!(context.filters(), context
.add()
.arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage"))
- .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r"
+ .env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri())
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true")
+ .env_remove(EnvVars::UV_HTTP_RETRIES), @r"
success: true
exit_code: 0
----- stdout -----
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index f142beefa..bc27228c7 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -2168,7 +2168,9 @@ async fn install_git_public_rate_limited_by_github_rest_api_429_response() {
uv_snapshot!(context.filters(), context
.pip_install()
.arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage")
- .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r"
+ .env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri())
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true")
+ .env_remove(EnvVars::UV_HTTP_RETRIES), @r"
success: true
exit_code: 0
----- stdout -----
From ee35fe34ab90adb99505581daeb63edd5c3f827f Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Fri, 11 Jul 2025 13:59:47 -0500
Subject: [PATCH 013/130] Increase the number of retries during test runs in CI
(#14565)
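For context, a minimal sketch of how an env-driven knob like `UV_HTTP_RETRIES` is typically read (illustrative only, not uv's actual retry plumbing); the workflow changes below simply export a higher value so transient network failures get more attempts:

```rust
// Illustrative only: read a retry count from UV_HTTP_RETRIES, falling back
// to a caller-supplied default when the variable is unset or not an integer.
use std::env;

fn retry_count(default: u32) -> u32 {
    env::var("UV_HTTP_RETRIES")
        .ok()
        .and_then(|raw| raw.parse().ok())
        .unwrap_or(default)
}

fn main() {
    // In CI the workflow sets UV_HTTP_RETRIES=5; locally this prints the default.
    println!("retries: {}", retry_count(3));
}
```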
---
.github/workflows/ci.yml | 8 ++++++++
crates/uv/tests/it/edit.rs | 4 +++-
crates/uv/tests/it/network.rs | 33 +++++++++++++++++++++++++--------
3 files changed, 36 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b0d8e18a3..ba7a4b4d1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -223,6 +223,9 @@ jobs:
tool: cargo-nextest
- name: "Cargo test"
+ env:
+ # Retry more than default to reduce flakes in CI
+ UV_HTTP_RETRIES: 5
run: |
cargo nextest run \
--features python-patch \
@@ -256,6 +259,9 @@ jobs:
tool: cargo-nextest
- name: "Cargo test"
+ env:
+ # Retry more than default to reduce flakes in CI
+ UV_HTTP_RETRIES: 5
run: |
cargo nextest run \
--no-default-features \
@@ -300,6 +306,8 @@ jobs:
- name: "Cargo test"
working-directory: ${{ env.UV_WORKSPACE }}
env:
+ # Retry more than default to reduce flakes in CI
+ UV_HTTP_RETRIES: 5
# Avoid permission errors during concurrent tests
# See https://github.com/astral-sh/uv/issues/6940
UV_LINK_MODE: copy
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index 18170cff9..ddaed434f 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -11877,7 +11877,9 @@ async fn add_unexpected_error_code() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(server.uri()), @r"
+ uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(server.uri())
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
diff --git a/crates/uv/tests/it/network.rs b/crates/uv/tests/it/network.rs
index 1a5805970..a9376e07e 100644
--- a/crates/uv/tests/it/network.rs
+++ b/crates/uv/tests/it/network.rs
@@ -3,6 +3,7 @@ use std::{env, io};
use assert_fs::fixture::{ChildPath, FileWriteStr, PathChild};
use http::StatusCode;
use serde_json::json;
+use uv_static::EnvVars;
use wiremock::matchers::method;
use wiremock::{Mock, MockServer, ResponseTemplate};
@@ -48,7 +49,9 @@ async fn simple_http_500() {
.pip_install()
.arg("tqdm")
.arg("--index-url")
- .arg(&mock_server_uri), @r"
+ .arg(&mock_server_uri)
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -72,7 +75,9 @@ async fn simple_io_err() {
.pip_install()
.arg("tqdm")
.arg("--index-url")
- .arg(&mock_server_uri), @r"
+ .arg(&mock_server_uri)
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -99,7 +104,9 @@ async fn find_links_http_500() {
.arg("tqdm")
.arg("--no-index")
.arg("--find-links")
- .arg(&mock_server_uri), @r"
+ .arg(&mock_server_uri)
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -125,7 +132,9 @@ async fn find_links_io_error() {
.arg("tqdm")
.arg("--no-index")
.arg("--find-links")
- .arg(&mock_server_uri), @r"
+ .arg(&mock_server_uri)
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -154,7 +163,9 @@ async fn direct_url_http_500() {
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
- .arg(format!("tqdm @ {tqdm_url}")), @r"
+ .arg(format!("tqdm @ {tqdm_url}"))
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
@@ -180,7 +191,9 @@ async fn direct_url_io_error() {
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
- .arg(format!("tqdm @ {tqdm_url}")), @r"
+ .arg(format!("tqdm @ {tqdm_url}"))
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
@@ -239,7 +252,9 @@ async fn python_install_http_500() {
.python_install()
.arg("cpython-3.10.0-darwin-aarch64-none")
.arg("--python-downloads-json-url")
- .arg(python_downloads_json.path()), @r"
+ .arg(python_downloads_json.path())
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
@@ -269,7 +284,9 @@ async fn python_install_io_error() {
.python_install()
.arg("cpython-3.10.0-darwin-aarch64-none")
.arg("--python-downloads-json-url")
- .arg(python_downloads_json.path()), @r"
+ .arg(python_downloads_json.path())
+ .env_remove(EnvVars::UV_HTTP_RETRIES)
+ .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
From 7ea030a1a854680001205037ae0959c1750365a2 Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Sat, 12 Jul 2025 12:46:40 -0400
Subject: [PATCH 014/130] Bump Python releases to pick up
python-build-standalone 20250712 (#14578)
This is primarily a regression fix for missing SQLite extensions
(astral-sh/python-build-standalone#694).
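A quick, hedged way to spot-check the fix after installing one of the bumped builds, assuming the regression concerns the `sqlite3` module's loadable-extension support and that `python3` resolves to the new interpreter:

```rust
// Illustrative spot-check: ask the interpreter whether its sqlite3 module
// exposes loadable-extension support. "python3" is assumed to be the bumped build.
use std::process::Command;

fn main() -> std::io::Result<()> {
    let out = Command::new("python3")
        .args([
            "-c",
            "import sqlite3; print(hasattr(sqlite3.Connection, 'enable_load_extension'))",
        ])
        .output()?;
    print!("{}", String::from_utf8_lossy(&out.stdout));
    Ok(())
}
```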
---
crates/uv-python/download-metadata.json | 936 ++++++++++++------------
1 file changed, 468 insertions(+), 468 deletions(-)
diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json
index 4e2d98846..8c7ffec4c 100644
--- a/crates/uv-python/download-metadata.json
+++ b/crates/uv-python/download-metadata.json
@@ -11,8 +11,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "7a69c986243f4e7ed70c1a97d4a524253d3fb4f042ae68eb688f9fafe5dbb714",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "94b80254a7e50dd2d82d323a0bffdc59772b2f04b0f0c044bc4d56d696249eb2",
"variant": null
},
"cpython-3.14.0b4-darwin-x86_64-none": {
@@ -27,8 +27,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "8c100fe3bfef08b046051c4183c9ca4542317729c466982783fabea996fcb97f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "2155f60b2a8a1448b2c4852a27887be2e9fe8e910bac1a75b342e44884a191b5",
"variant": null
},
"cpython-3.14.0b4-linux-aarch64-gnu": {
@@ -43,8 +43,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "930e8ecf6c89de145cf49171d98e089af7007752e8e7652c1ea73460fec0d07c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "f76fb1a88e722f9cae8b82b9851b736968582527d8a1212ab3b918b2012ce0a6",
"variant": null
},
"cpython-3.14.0b4-linux-armv7-gnueabi": {
@@ -59,8 +59,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "5b489148c56a0a9772568706cf6c716e14b1d93e52f54d76f71f14783f659d13",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "c358e87ac84d228e191a22d2447c60e1cb15e6cbb753c397b0e9b9da9c557ce0",
"variant": null
},
"cpython-3.14.0b4-linux-armv7-gnueabihf": {
@@ -75,8 +75,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "2b4474ebc495b64374339acf58d22793f8f55ce1a40e31d61a988af7cf2c8085",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "a426e05b3d8a20dfbda84162ef75ed3590e7137436623b93d136c084d0688690",
"variant": null
},
"cpython-3.14.0b4-linux-powerpc64le-gnu": {
@@ -91,8 +91,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "abc24237c270f248b5b2990091209a60c23d5bef8476796cf5b0c16c34a24e54",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "b835aac7264b64652007f5210369d5fe1b8d1629befbb8d00e40a891cd039f67",
"variant": null
},
"cpython-3.14.0b4-linux-riscv64-gnu": {
@@ -107,8 +107,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "fd25c2de82d3ea004831c543591195f3790c93d5df7f5f1a39b0e5f9e1716039",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "0ad96a96ae32f5979f2bd9e6992ecf122819ceb06711439c66b9f8a3dc1eaba4",
"variant": null
},
"cpython-3.14.0b4-linux-s390x-gnu": {
@@ -123,8 +123,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "35f93fd3336dcfd2612fb2945937221f81af9a65369efb81afa1d89784029e61",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "18763ccce35baeb1960e043f9bd4be3a36a511acc6844b91381532ee5b7c6da8",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64-gnu": {
@@ -139,8 +139,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "a76999ca5b8c6e219750b016870fc85cc395dd992de1d702576d1c831585aa95",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "3d07868b329c7c9b7ae5a52af35c27d0b20b5a7f6f574a3bedb5836b4bb337d7",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64-musl": {
@@ -155,8 +155,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "a8f12323bd6c10f1ecadbe424e64c2429434e59e69314966a422c9a7eb5f13a0",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "82ee7827c1f75a7b5150f731ddf1dc312c7958c741a6746967fb8a5656c85b91",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v2-gnu": {
@@ -171,8 +171,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "64649a18cee348ba72b42ec46aa548dca3d79ed37a2abeea17f5b5fea4ad67b4",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c96dd14927c89392bd0ff3264e4b7bdfeea76979f544ee30260151c913046396",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v2-musl": {
@@ -187,8 +187,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "352b97d9c5634787cdfe11b00a4ac83e0a254f70dc2887780fa93b52a8cdbec8",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "ae82acb77c69c506a799bd7022fe9a22508814fe76d0d7e53c1f2f60b5fc77d6",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v3-gnu": {
@@ -203,8 +203,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "d780f46da4c2ae2400cb08c6e5900d976d46572c1fb2dc6a9494a4c309f913f2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "9fdb71600bbdcae5dd47426972d1d0af03a2f7d98ac44fbb63284203738fda2c",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v3-musl": {
@@ -219,8 +219,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "4ef7c85e6a6788f1838a80a23463ee36fdfd50c909c784bc6ed7011725220288",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "f864428b9b6b5938efeb93526d52ec685377672ad292e4b2eee62cb6107933e1",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v4-gnu": {
@@ -235,8 +235,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "cd91301114d7ebfcfccbb3377a09c8d8537dc460de629ec6e64d3880aeb7ab0c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "0d3f7f0c8b881bcdff08d14a0999c736f13e309e663edd0739a2db327c43e4c2",
"variant": null
},
"cpython-3.14.0b4-linux-x86_64_v4-musl": {
@@ -251,8 +251,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "ff8cba3869c879717c6aae2931398b1c30ab761008483a49cc5d93899a2eeb8c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "11443f91bbda5f3d440908f20bfafd549dad5357e705f1e85273ebb6db0206f3",
"variant": null
},
"cpython-3.14.0b4-windows-aarch64-none": {
@@ -267,8 +267,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "c21eb7a109ec8b980735aee5ca5c3b7522479919d12078f046a05114de428ff0",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "61bef0ff22c3117795c55d5e8e2c87956a94fbb4725e03231f360b7c68ba5358",
"variant": null
},
"cpython-3.14.0b4-windows-i686-none": {
@@ -283,8 +283,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "29ebdc7899a947e29aba6376477d059871698b712cf0dfb75b8e96af2e8b23cb",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "bcf229f25c12f81169b1f1d207a719fc2908f4e6ba5b61404787710d3b1e2120",
"variant": null
},
"cpython-3.14.0b4-windows-x86_64-none": {
@@ -299,8 +299,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "072b97a1850f11bc350c1abfa5c08024ce4fe008022d634e23d4647e47cc005f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "8255b31a40867eb52ff1a2e476f56c697a717e6193d313413c788b0fbdd28a3c",
"variant": null
},
"cpython-3.14.0b4+freethreaded-darwin-aarch64-none": {
@@ -315,8 +315,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "f4a28e1d77003d6cd955f2a436a244ec03bb64f142a9afc79246634d3dec5da3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "ce28498dcf2c5c4d3c964e6e44ff44e5b1b72a4234f807e2ff121393ed40442e",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-darwin-x86_64-none": {
@@ -331,8 +331,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "f1ea70b041fa5862124980b7fe34362987243a7ecc34fde881357503e47f32ab",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "a7d63512a17522d7c76c7bafa27b49a35f4f5f74b5140be209ca17c0cad15737",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-aarch64-gnu": {
@@ -347,8 +347,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "2a92a108a3fbd5c439408fe9f3b62bf569ef06dbc2b5b657de301f14a537231a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "0250288ab21cfd14caa826056de7203baa19ed7e85198c19e6dcdd8b2124ae0e",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-armv7-gnueabi": {
@@ -363,8 +363,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst",
- "sha256": "f1d52c12f6908f6dc0658bf9d5cf1068272b4f9026aa33b59ded9f17e1d51f9f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst",
+ "sha256": "c0bd17a6409c21fb10b075449511c09940b53438bf785cd20db1f2e5d15ade30",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-armv7-gnueabihf": {
@@ -379,8 +379,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst",
- "sha256": "418741c7de3c53323d9ae8a42a450f0f612fa5fbea1bedeea57dee0647c82a8d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst",
+ "sha256": "d747055b6b5878dcf6b9d425b0a7ea3fa7b33fe241b31681e28f56d5ed86ed5d",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-powerpc64le-gnu": {
@@ -395,8 +395,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "5823a07c957162d6d675488d5306ac3f35a3f458e946cd74da6d1ac69bc97ce3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "756376b22bf237646f7bb519bee69b1704d369a6ca5941b5ff83d5b2d022612b",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-riscv64-gnu": {
@@ -411,8 +411,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "f48843e0f1c13ddeaaf9180bc105475873d924638969bc9256a2ac170faeb933",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "25dbe52c44b42914d9343d456dc17fbcbf234ab1f0fd0be00cae27c6e336546b",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-s390x-gnu": {
@@ -427,8 +427,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "a1e6f843d533c88e290d1e757d4c7953c4f4ccfb5380fef5405aceab938c6f57",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "7ebb845ee94ae870e13146de0052251d48d584363c1b374f84fbdeb8e7936350",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64-gnu": {
@@ -443,8 +443,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "7f5ab66a563f48f169bdb1d216eed8c4126698583d21fa191ab4d995ca8b5506",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "0df5305c3b95f53f7f2db762be2badf752477c359146155f8b9658b71aff2128",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64-musl": {
@@ -459,8 +459,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "180249191d6e84b5dd61f6f7ba7215582b1296ef4d8bd048439cd981363cd2b2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "c6beef48f6a2ca49da0b2798e5dc9c45233a8f0b6fa778616ba7cfdcd66f85a6",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v2-gnu": {
@@ -475,8 +475,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "bc9c0f25680f1f3c3104aef3144f1cd8c72d31e4cbf45a7c6f89ddb5c1b0e952",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "31587432be64d6913317919c239ef84ae4c78a7b11f95e8d48b81dc820021be3",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v2-musl": {
@@ -491,8 +491,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "b30a2004c89d79256926bb4d87bec6100b669d967d336cb9df1aa5ae9a9106cf",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "302a23af192207337db2c2268a3fed98f13845ad5324f1ff97baa68807098513",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v3-gnu": {
@@ -507,8 +507,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "6941b1d02adb12cd875c2320e0d30380b7837c705333336b8d295440d93d3668",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "39747d608a5400b0fa37fbddef606678f8552fdf907f43b1d8a475436c413aa9",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v3-musl": {
@@ -523,8 +523,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "b64f69cb58ac51e962080d6fa848d90dc24739bc94089a7975b3459b23ad5df3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "9870447eb095027df97a1e412eff378fb78872a527dc6adeffc901fff8a40d70",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v4-gnu": {
@@ -539,8 +539,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "b294b586bdcbc0b038e77999d4371c6fe3d90228b2b9aa632262ad3f5210487b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "78adac3ab0696380ebdbceb96924d0f033e20b033e3a1633aa54df0295407292",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-linux-x86_64_v4-musl": {
@@ -555,8 +555,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "61ed61ed5052a7ca9d919194526486d7f973fd69bb97e70e95c917a984f723c7",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "59f92039b72eca4cfb4639699bc97bbb0de6b866a7894bac9cf132374cf5aa1a",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-windows-aarch64-none": {
@@ -571,8 +571,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "d7396bafafc82b7e817f0d16208d0f37a88a97c0a71d91e477cbadc5b9d55f6d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "37fac713d3b25731f134c9c6b1c9021ffb2aacda630010ffa15497446655179f",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-windows-i686-none": {
@@ -587,8 +587,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "7066fc54db97331fb25f52783f188d65f8868ad578f9e25cb9b1ae1f2c6dacc5",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "5a7d61b1863960dab6f78027b5edc543ee41d0a45f7851413951389b842385c8",
"variant": "freethreaded"
},
"cpython-3.14.0b4+freethreaded-windows-x86_64-none": {
@@ -603,8 +603,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "5de7968ba0e344562fcff0f9f7c9454966279f1e274b6e701edee253b4a6b565",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "e503ec18fd8b1d0fcb94ded5a67be4a88334d5b101dc485b0281577ae84a6acc",
"variant": "freethreaded"
},
"cpython-3.14.0b4+debug-linux-aarch64-gnu": {
@@ -619,8 +619,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "9ac97f7531f9d74ccd1f7de8b558029094831a0be965fe9569ecc7547aeec445",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "6bf05e71ef3cf092d0f40d992ea192016327468992e5e0b7bde8ac48d6b9c145",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-armv7-gnueabi": {
@@ -635,8 +635,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "fcb0d09a7774b69ca7df3a954fedc32bd1935838c91918f1d08b9a19914f30ec",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "9b73df95176c383e4af6027b78da060c69892914bfc195107084b21281f09bfd",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-armv7-gnueabihf": {
@@ -651,8 +651,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "664a70a1f73eb0ca1299bf8b26ec0b696ea1a09a26b5a1956688c3e4004b0ce2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "2d325c459c761b4bca5e2005aeccc889ef62ee4b0811d9252e22817f3037825e",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-powerpc64le-gnu": {
@@ -667,8 +667,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "71ac17708fd382292c5dbc77b11646b9ee52230381c2f7067bc5f22a2e2fd9cf",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "1c49311aae1ade3afd9d39091897d2b1307aeadfdde87e5099e07b0fdc32bc2f",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-riscv64-gnu": {
@@ -683,8 +683,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "2916572ff670885b38860861fceb395711831ac2a36e0830fe0ee029a91cec56",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "ad52ff04ef3fc78430b8b0623a0442088dc4e8c6835fce6957e251676942ebbf",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-s390x-gnu": {
@@ -699,8 +699,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "4086605066914c6fb1944932e59585c328c3a688379d2c061df8e963e65e04dd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "6865d4830ef7beaa99dd817df0c49bb0d380b9a0c822be6f8ca090f9a568df81",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64-gnu": {
@@ -715,8 +715,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "c91fa37d96f46a4f58ac6d3b2d9e0178288e2fb21a05131c874abfbfae404f71",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "db9c32e119c58d9f25745599efaa383be06323ca8d8524a6c50b62367b058b93",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64-musl": {
@@ -731,8 +731,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "ab08748b50a7df1e6231fab1bf59a7e0b26cfb44ff2c811a9f249fe141332d21",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "39dece02d5b286e7d9ffbbacdd730db0d64b881bb2b2edd3b721be23c4e89609",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v2-gnu": {
@@ -747,8 +747,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "64dd678f10b3bb86bd047cf585651d323c80e34da840ca8ed49507f3959acc90",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "90453b5f3d982604a950e5f362b192889f82524257d2fa8bf979b270e8bdb370",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v2-musl": {
@@ -763,8 +763,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "3e057342e72555a4934e05037423f2b68f42d62a6f10b36d48150ca5110d603e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "d070ef11038828a1326c230c45782c70f02a6b89504af76cc95f0778db20caac",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v3-gnu": {
@@ -779,8 +779,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "265b07a17fedc8ca32a8ebd6763946c21bb472346ac65efb89d1e045e4772abd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "baf92ab8fa281f72a8e8b4a1975a931876866b69aebed1eb94dafeaa219f788d",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v3-musl": {
@@ -795,8 +795,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "5860fc768bf7c7d2051ee80109f0fd5a4d89f045ca26562f88e5f93978979abe",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "3a92a638ef08b058eebf806ecb0134aa9467c554512fd2082e6ecd1a6c517fdd",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v4-gnu": {
@@ -811,8 +811,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "ae0cf5352a594ce1dfd287fb49684490128a7f89b3dfbcd43f1b8d84083c8ead",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "7144cb9ac62b0084b8421b83e90aab0ed6e704cc5f63ba1c16f8216971d11857",
"variant": "debug"
},
"cpython-3.14.0b4+debug-linux-x86_64_v4-musl": {
@@ -827,8 +827,8 @@
"minor": 14,
"patch": 0,
"prerelease": "b4",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "5e2b1a537aa9cc6e1c77e6050f31aacd866c50b16b603b54c485b8f8cfeebb4a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "bef1d2f0e3f32667366655e8333ef1f92ab07cd7b988da110f3970a5d671e3a3",
"variant": "debug"
},
"cpython-3.14.0b3-darwin-aarch64-none": {
@@ -6395,8 +6395,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "71c9af8648001c4a09943305a890339a4cfff0bd260aa5a9d8c8e82e7ef32583",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "08d840adc7dd1724bd7c25141a0207f8343808749fa67e608d8007b46429c196",
"variant": null
},
"cpython-3.13.5-darwin-x86_64-none": {
@@ -6411,8 +6411,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "65b171888e34d0a904ee0a6adef1a5366bdedcd9fca990ec06717a68eef2c4ff",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "5277dc381e94abde80989841f3015df2aba33894893c4a31d63400887bdefd2d",
"variant": null
},
"cpython-3.13.5-linux-aarch64-gnu": {
@@ -6427,8 +6427,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "e0d2322a92b9bb8e39442cbcfa6ee9590fd035de2a6199d4e6903dcbc0b6542a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "82d8a025b43c9127d47490a7070aa5d8bfede2d1deb5161c0f4c2355396f9e5d",
"variant": null
},
"cpython-3.13.5-linux-armv7-gnueabi": {
@@ -6443,8 +6443,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "59442502a4eebff23a49503a9cbe92a6b813a756bf36a299ced55fb705d5fe73",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "6aa50bf3245364091a7e5ca6b88166f960c2268586c33e295069645815f16195",
"variant": null
},
"cpython-3.13.5-linux-armv7-gnueabihf": {
@@ -6459,8 +6459,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "c3de5a89b71ef3dc8ee53777a9fda3f2d7f381abc0b4a6f6f890de55d3620293",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "5f776b18951b9a0507e64e890796113a16b18adb93a01d4f84c922e2564dab43",
"variant": null
},
"cpython-3.13.5-linux-powerpc64le-gnu": {
@@ -6475,8 +6475,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "c17e73fe07de36a506ffc400173739d2802f30bdc5f5b6443891bbcee926edac",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "b74b79e5a65c84ed732071fd7b445a51b86c03ef18643b87c0fe5c96242e629b",
"variant": null
},
"cpython-3.13.5-linux-riscv64-gnu": {
@@ -6491,8 +6491,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "1b5da1585dca39a15452c891ff16f468ce984f76500c262f08c4aeae75e79c3c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "652416183693219b1f0f1f2a8d2a595f75f8c94e8c7b8b25ecd312ec1fdbb36e",
"variant": null
},
"cpython-3.13.5-linux-s390x-gnu": {
@@ -6507,8 +6507,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "d47e645034432fce6d107835c07d5fe38fd53232a66e0a9d63ead48b42da3539",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "29a7140db0cbd1426f450cd419a8b5892a4a72d7ef74c1760940dd656f8eaded",
"variant": null
},
"cpython-3.13.5-linux-x86_64-gnu": {
@@ -6523,8 +6523,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "2f57c58edc385fe9958d2c6e41ecd389cfed3f882515a1813f1d2ba4c964f399",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "e42827755c227d3ea31b0c887230db1cd411e8bddf84f16341a989de2d352c51",
"variant": null
},
"cpython-3.13.5-linux-x86_64-musl": {
@@ -6539,8 +6539,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "13cf16ef2008adf36a812add953317a4359945468dbcaece38b2b71466d05502",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "a652ff101318b7bd7a06181df679e2e76d592ebe70dbc4ca5db97b572889d93f",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v2-gnu": {
@@ -6555,8 +6555,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "416d3a7bd64c3ee047b37d91ce1a58ec308733292c0268bfd860984c21eb7377",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "dd945e6178236e2eee27b9de8e6d0b2ef9c6f905185a177676d608e42d81bebb",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v2-musl": {
@@ -6571,8 +6571,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "c32aee456cb150a8c105c213dc4afa8a409fba1aced890a4f58001ae70074922",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "af86120b3c3c48afdd512a798c1df2e01e7404875d5b54fc7bbde23f8b004265",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v3-gnu": {
@@ -6587,8 +6587,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "f2d3a4aa566ce5a505a82357c766ccfc60f6bb4e255fab8725da2fbc28a199d3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c13783eae63223bced84ec976be9ad87d5b2ab3d9ba80c4f678520a4763410ba",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v3-musl": {
@@ -6603,8 +6603,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "f8d1c8f82a6cd694ca453e1c5e96e7415232be288a832b17bd5a4e9b7a5c09fe",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "5e7433fd471a8d2a5dfa9b062b3c1af108eef5958e74d123de963c5d018b3086",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v4-gnu": {
@@ -6619,8 +6619,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "a46b315e40f93ce673fb5ff9193c1f9dee550fe6f494fe1bba41885ef19ee094",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "14a4301952bf11ddf023e27ff5810963bf5a165946009f72c18bdd53f22450c0",
"variant": null
},
"cpython-3.13.5-linux-x86_64_v4-musl": {
@@ -6635,8 +6635,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "4efeb9cd7c96f3b157478bb3037597b56334f14aad519eddc64da29849cc8031",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "583b793e600a9d55b941092de2f4f7426acaac7e7430ed9a36586f7a1754a8ea",
"variant": null
},
"cpython-3.13.5-windows-aarch64-none": {
@@ -6651,8 +6651,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "22b73edc3afc256b58bb41b5a660aa835500781ef5b187de0c941748b1f38e3a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "0e95119f5d018ec18bcf9ee57c91e13c9ffda2a5da5fa14f578498f8ec6e4ac0",
"variant": null
},
"cpython-3.13.5-windows-i686-none": {
@@ -6667,8 +6667,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "fffdf2a1a16b9a24ef8489008a4a08927b202d7b79401913bbe1363e4180ad3a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "a877e912a7fc298e2b8ee349ed86bee00ac551232faebf258b790e334208f9d2",
"variant": null
},
"cpython-3.13.5-windows-x86_64-none": {
@@ -6683,8 +6683,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "0871127fcf73c79479f36b2f34177565f6e97b87b4dd9cdafe4d6c37b54c153a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "bf9d014f24aa15f2ae37814e748773e395cbec111e368a91cdbcb4372bdff7c5",
"variant": null
},
"cpython-3.13.5+freethreaded-darwin-aarch64-none": {
@@ -6699,8 +6699,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "b7764ec1b41a7018c67c83ce3c98f47b0eeac9c4039f3cd50b5bcde4e86bde96",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "61862be1c897fff1d5ec772be045d1af44846ffd4a6186247cc11e5e9ae3d247",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-darwin-x86_64-none": {
@@ -6715,8 +6715,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "f15f0700b64fb3475c4dcc2a41540b47857da0c777544c10eb510f71f552e8ec",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "a51777a7a3d4b4860dd761dbcce85a8e9589031293a2f91f4a6a3679c3d0f5a8",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-aarch64-gnu": {
@@ -6731,8 +6731,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "ced03b7ba62d2864df87ae86ecc50512fbfed66897602ae6f7aacbfb8d7eab38",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "e907a33d468de5f3936e73a0e6281a40307207acf62d59a34a1ef5a703816810",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-armv7-gnueabi": {
@@ -6747,8 +6747,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst",
- "sha256": "0eafdd313352b0cda5cbfa872610cae8f47cfcba72da5a4267c7a1ef4dab8ccd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst",
+ "sha256": "fa495608f0bb7debc53a5d7e9bd10a328e7f087bba5b14203512902ead9e6142",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-armv7-gnueabihf": {
@@ -6763,8 +6763,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst",
- "sha256": "1a7c93ed247a564836416cbb008837059fb4e66468d1770a9b2ba2d12a415450",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst",
+ "sha256": "5316526a325b72a7e6a75f5c0ba8f2f4d1cbab8c8f0516f76055f7a178666f21",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-powerpc64le-gnu": {
@@ -6779,8 +6779,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "9c943e130a9893c9f6f375c02b34c0b7e62d186d283fc7950d0ee20d7e2f6821",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "23770a0b9e176b8ca1bbbecd86029d4c9961fa8b88d0b0d584b14f0ad7a5dccc",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-riscv64-gnu": {
@@ -6795,8 +6795,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "8075ed7b5f8c8a7c7c65563d2a1d5c20622a46416fb2e5b8d746592527472ea7",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "0f111d4619843451a0edd13e145fc3b1ea44aecf8d7a92184dcd4a9ed0a063c4",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-s390x-gnu": {
@@ -6811,8 +6811,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
- "sha256": "a8dbcbe79f7603d82a3640dfd05f9dbff07264f14a6a9a616d277f19d113222c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst",
+ "sha256": "0a6df4acd93d29b0d94aa92fa46482f10bbcfe1b1e608e26909f608691c7f512",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64-gnu": {
@@ -6827,8 +6827,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "e21a8d49749ffd40a439349f62fc59cb9e6424a22b40da0242bb8af6e964ba04",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "2c49314909be249c90071a54168f80d4cbf27ecbec7d464f8743d84427c5b7b1",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64-musl": {
@@ -6843,8 +6843,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "625ae3e251cf7f310078f3f77bfdae8bbe3f1fe2c64f0d8c2c60939cb71b99d4",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "e27a15c987d616763619413b2d7122d1f4ba66a66c564c2ab4a22fb1f95c826d",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu": {
@@ -6859,8 +6859,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "7b9bc02fc1eb08ba78145946644fe81bc6353e2e28e74890ff93378daffa9547",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "6882afc2e308561b8c1a23187c0439116434aae8573fd6e6dbdce60e3af79db5",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v2-musl": {
@@ -6875,8 +6875,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "4e163edf7e6a6a104f19213f3ad1b767f4d33a950ca8ea51f7b9ce04ba5a4c16",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "a8ef0d7a50a2616b2a1f8a5d7a3b52fa69085e6a75a6f7d3f318f7c132abfe16",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu": {
@@ -6891,8 +6891,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "f1390326557df5562639bccaaaad4edcebf4e710696a2948b2aa00db2abdde5a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "ab2e44c83245d18226f1fce26b09218de866048ecb515b50b8174ba75c182b4e",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v3-musl": {
@@ -6907,8 +6907,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "d5751f3b8af6d06e06a0ce5ea18307c1b6c38508b3879442c504eca3047d4ae2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "bad372bd5e38ff42064907b95273736137485ffdc6ff1d90b2e49f8df2829abb",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu": {
@@ -6923,8 +6923,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
- "sha256": "88d8e7dfed818877158ede9b22342d9ce0fd3f49116954ca0eae7540e675d235",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst",
+ "sha256": "d12f4ecb61ae7ced3723173aa0a5ddaea395e098bfede57497426c65b5776b82",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-linux-x86_64_v4-musl": {
@@ -6939,8 +6939,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
- "sha256": "2a6de48306f788910b33c54e1640d3b9fe29ccb3c44dcdc0b0ba6d6a89213d9e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst",
+ "sha256": "734233279cbab1f882f6e6b7d1a403695379aaba7473ba865b9741b860833076",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-windows-aarch64-none": {
@@ -6955,8 +6955,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "accb608c75ba9d6487fa3c611e1b8038873675cb058423a23fa7e30fc849cf69",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "51d116a7f93654d602d7e503e3c7132ae4f10e5a8e8fbe7e2ceb9e550f11051a",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-windows-i686-none": {
@@ -6971,8 +6971,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "5cba33c38d25519b4c55a5b0015865771e604a2d331c7d335f52753b09d5b667",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "d4461149a95fd6d9c97d01afb42561c4b687d08526c84e8ff9658d26514450eb",
"variant": "freethreaded"
},
"cpython-3.13.5+freethreaded-windows-x86_64-none": {
@@ -6987,8 +6987,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
- "sha256": "75acd65c9a44afae432abfd83db648256ac89122f31e21a59310b0c373b147f1",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst",
+ "sha256": "eb704f14608176fc8d8d8d08ca5b7e7de14c982b12cd447727bf79b1d2b72ac7",
"variant": "freethreaded"
},
"cpython-3.13.5+debug-linux-aarch64-gnu": {
@@ -7003,8 +7003,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "bd73726128747a991d39bbc2c1a1792d97c6d2f4c7b6ed4b2db9254dd16d4ea6",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "defdf6ddc233f8e97cc26afaa341651791c6085a59e02a1ab14cf8a981cdc7bf",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-armv7-gnueabi": {
@@ -7019,8 +7019,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "bbc1e704e4a2466cd52785e52f075e1b10ef5628879620b9461c6af2072e7036",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "69308c195ebc63543efa8f09fabb4a6fa2fc575019bd1afbc36c66858d2122c4",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-armv7-gnueabihf": {
@@ -7035,8 +7035,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "60389c2db232050357f24d7858ff019bb9cb37295465196275ec999e1d85f7db",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "ad3c911764e60a94c073c57361dc44ed1e04885652cabb1d1f3a1d11d466650d",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-powerpc64le-gnu": {
@@ -7051,8 +7051,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "e93c5832c3c6e39a2131d69de2e700bddab3a4f8bce74039e69276cec645f3a8",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "bd91893c42edc3b23ee45df6fff77250dab8f94646bbdf2087c0a209231f210d",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-riscv64-gnu": {
@@ -7067,8 +7067,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "6fb1da6dd6ccc40eea19062cb494f7cf0207c1e99a0a8cf9cae8fdc9cc30a4b6",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "7f3e649685358af0d78c8d7dcc4d357d5674e24aeaecbcc309ce83d5694821ce",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-s390x-gnu": {
@@ -7083,8 +7083,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "a62a131ed07e9ef322ded45fb5257aa58502b10cb6e2a18298145838a041637b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "fc013b0375c357286bf6886c0160c9a7fca774869c8a5896114ac1bf338f0b2e",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64-gnu": {
@@ -7099,8 +7099,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "a054dca4b204562ae34cd38f7b31ff53f035acd012310f9f7c8817eac9852db2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "3502c7c36500fa1a84096f0e9c04dc036f3dbbae117d6b86d05b0a71a65e53cb",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64-musl": {
@@ -7115,8 +7115,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "5da37b4623286ed7283277ec6288d0be88fcd3d208e98c075a140385734f0056",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "b42647c29dca10e55ceeaa10b6425f4ff851721376b4b9de82ce10c21da2b5f2",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v2-gnu": {
@@ -7131,8 +7131,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "83faa4f0a92287a55887ef402bb138ca7aa46848afb7c9a30ebc337f8cb4b86c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "5dee021b1e82ddeacae72fdee5ba6d2727faf1b39b8d4b9361a7961e5321c347",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v2-musl": {
@@ -7147,8 +7147,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "8caaba837f778d2da1b041f15f0f46a3c117a531a55d6e79f5aaca836ecfb84f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "623e2fedb44f5c8c123371a9e82771792d1a64ea11cb963259947679c1bb7027",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v3-gnu": {
@@ -7163,8 +7163,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "d4d2e746af77d16428d8168d11f8bf5b90424667949af7895413cdc18ebcaee8",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "f24df9f31d052c4e9cabec7a897d78ceccf9fb90a6edaa6f4f128e49d5f27162",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v3-musl": {
@@ -7179,8 +7179,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "f76628dc2447a1fc55f463623c81f9a19002b5f968afe77b57136fdc41833993",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "2821ef432b962ab4968e339f8d55a790eb64e266ccba674837589d58fb40f0d0",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v4-gnu": {
@@ -7195,8 +7195,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "4620c454e6ae9ad0093785b54790ddb68c2d3f2d868aa79a5aa678b98e1138a3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "8f9f953c202e0f6b5f7e7abff2b34beaff7a627d1f7ff8cdfe4d29f4fc12f067",
"variant": "debug"
},
"cpython-3.13.5+debug-linux-x86_64_v4-musl": {
@@ -7211,8 +7211,8 @@
"minor": 13,
"patch": 5,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "e1f4f398dadd9cd83e351ea08a068bc3ea24f870ccddbeb3b65ce65a3bc5c106",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "5c0740e8df7d69b4e2ead4f11db97e3d884e77377d84cbf6fba58077043388fb",
"variant": "debug"
},
"cpython-3.13.4-darwin-aarch64-none": {
@@ -11435,8 +11435,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "3c948bee581f42c4a3b072a5e1ff261e0eb1636c00d5474c28a13fa627c95578",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "0a5748a455ebd0ef0419bffa0b239c1596ea021937fa4c9eb3b8893cf7b46d48",
"variant": null
},
"cpython-3.12.11-darwin-x86_64-none": {
@@ -11451,8 +11451,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "c81794121d513b7eab710a210202e78393400460251a6878c85b927977098b38",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "1154b0be69bdd8c144272cee596181f096577d535bff1548f8df49e0d7d9c721",
"variant": null
},
"cpython-3.12.11-linux-aarch64-gnu": {
@@ -11467,8 +11467,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "7ac6956ce9119a44531e9cbe3fe4d0beadcf244e02be81a863b95aa69041314f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "73a22b9fa275682f326393df8f8afe82c302330e760bf9b4667378a3a98613ba",
"variant": null
},
"cpython-3.12.11-linux-armv7-gnueabi": {
@@ -11483,8 +11483,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "4cc102db1b315425d2feda63407ee0e737902d94eaecf52e3ec8ea6f6d7cee4d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "6a60953cc821d673bf67724d05a430576d0921a60cfceeca11af5a758bd3ae71",
"variant": null
},
"cpython-3.12.11-linux-armv7-gnueabihf": {
@@ -11499,8 +11499,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "c62d2c512b4e35dfb40d29246ed02cf0049e645bf333eca0a9e703da51f64597",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "1f8b03c8bf51f36f659961364f9d78a093af84305bbe416f95b5ecb64a11314d",
"variant": null
},
"cpython-3.12.11-linux-powerpc64le-gnu": {
@@ -11515,8 +11515,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "1025a919ad5170f76c58fb73f4b2b3a5e2ed910d1f802390f032b4da91152f23",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "10164c4c0e7f9a29024677226bc5f7c0b8b2b6ac5109a0d51a0fb7963f4bec48",
"variant": null
},
"cpython-3.12.11-linux-riscv64-gnu": {
@@ -11531,8 +11531,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "0178724fd0ce4712092c2afb66094e12d1f7e07744cf9d0c462aad516a82b984",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "f47a3ad7d96ba16b8b38f68f69296e0dca1e910b8ff9b89dd9e9309fab9aa379",
"variant": null
},
"cpython-3.12.11-linux-s390x-gnu": {
@@ -11547,8 +11547,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "cb480b2fd0fefcdf71e07ab6a321e878bbc6d2c855356575db29fcbb48d5eae1",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "0714bccd13e1bfd7cce812255f4ba960b9ac5eb0a8b876daef7f8796dbd79c7a",
"variant": null
},
"cpython-3.12.11-linux-x86_64-gnu": {
@@ -11563,8 +11563,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "50f2684ecd4dfdff732d091f0e3d383261a9d524a850784cd01a1c0839ece3e7",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "e42c16fe50fda85dad3f5042b6d507476ea8e88c0f039018fef0680038d87c17",
"variant": null
},
"cpython-3.12.11-linux-x86_64-musl": {
@@ -11579,8 +11579,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "f7ef6763b79a50da594fd1e03a6ee39017db6002c552539dbe0edffefc453804",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "3676e47a82e674878b986a6ba05d5e2829cb8061bfda3c72258c232ad2a5c9f1",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v2-gnu": {
@@ -11595,8 +11595,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "32320209ab9b187b142a81bc4063c8aab9aa05ddb9833ca921c17eefdd2f1509",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "ddf0c26a2df22156672e7476fda10845056d13d4b5223de6ba054d25bfcd9d3c",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v2-musl": {
@@ -11611,8 +11611,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "14abfef4e25db478db20dd15627576f47ff012a0eb3f7de3f9d1101ea409d02c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "2be8e228b2a698b66f9d96819bcc6f31ac5bdc773f6ec6dbd917ab351d665da2",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v3-gnu": {
@@ -11627,8 +11627,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "4f71291857a656cf4b780d7c5bd2667ecde14f9ec093e026cf28d2c8727d69ad",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "820174fbb713495a1beecd087cc651d2d4f1d10b1bb2e308c61aecec006fea0a",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v3-musl": {
@@ -11643,8 +11643,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "b3530c771104b7765241b87b2ac749f6fce1886b4d2b677a1fc46aaca9378019",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "5cfc247d6ee2303c98fecddfbdf6ddd2e0d44c59a033cb47a3eb6ab4bd236933",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v4-gnu": {
@@ -11659,8 +11659,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "0de444c0e4ac45f2f4863889e57f2dbbe79f01593afcc21f63b4ddb5832edd61",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "01519be2a0930f86a43ac93f25fb0f44b3dbf8077ecd23c98c5b3011150ef16a",
"variant": null
},
"cpython-3.12.11-linux-x86_64_v4-musl": {
@@ -11675,8 +11675,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "3d761bb79ef0946ee76b659c9bcf034dc8a67e1d414bef51ecb498c595a2b262",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "93a9714ef88ece8575707e1841369b753f9d320c42832efffda8df8dfcbd9ca7",
"variant": null
},
"cpython-3.12.11-windows-aarch64-none": {
@@ -11691,8 +11691,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "c2b541cd75cd12d7b1d52ebee724cc1b1f4d7367901d06b2f3f4a2e3ded4145e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "512ae77ca0afe3a81d990c975548f052b9cde78187190eb5457b3b9cdad37a9c",
"variant": null
},
"cpython-3.12.11-windows-i686-none": {
@@ -11707,8 +11707,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "cbf9c2bd5f182f6fc6da969729d0d4a5683d5f392f3a9bed3d7240cbe7385c11",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "c815e6eadc40013227269d4999d5aef856c4967e175beedadef60e429275be57",
"variant": null
},
"cpython-3.12.11-windows-x86_64-none": {
@@ -11723,8 +11723,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "79e5d97e543975309fe3a22e27f2d83d7b08cff462d699bfa721854971773ec6",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "49911a479230f9a0ad33fc6742229128249f695502360dab3f5fd9096585e9a5",
"variant": null
},
"cpython-3.12.11+debug-linux-aarch64-gnu": {
@@ -11739,8 +11739,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "a8d1e10b91253cf528c9233c314e6958de7d9380c5e949a2ce1b1b4dc8538ebd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "aed96d0c279ff78619991fadf2ef85539d9ca208f2204ea252d3197b82092e37",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-armv7-gnueabi": {
@@ -11755,8 +11755,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "46a11e0955ea444a0fe3fabbe9b1f36be4a72c804b8265d90f84f26a3de3199e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "360e6b2b9bf34d8fb086c43f3b0ce95e7918a458b491c6d85bf2624ab7e75ae3",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-armv7-gnueabihf": {
@@ -11771,8 +11771,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "ccc5fbb01a83f1a264e90d8f92324c64d3dc2b2bdc4568340bb58dc62b061cce",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "fffb9b6c2e81b03aa8a1d8932a351da172cd6069bbdc192f020c8862d262eab5",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-powerpc64le-gnu": {
@@ -11787,8 +11787,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "0f334bbaa774e7b98f264e04456dfb6130519294ac0c25593cebb41c92571e34",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "a8bed95f73ccd6451cad69163ef7097bfc17eda984d2932a93e2dda639f06ff2",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-riscv64-gnu": {
@@ -11803,8 +11803,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "f048e364a7895b535c9e68f987cf17e3ee5f3bd3b7189b95cc7db30cd8a7b9b5",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "395d73e73ff0d0085ddb83f15d51375c655756e28b0e44c0266eb49f8d2b2f27",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-s390x-gnu": {
@@ -11819,8 +11819,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "c48068f9f02f16314265567acb56e411e9936abc9b18c9d67811f5faade66031",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "097dc82abc3805b8e1721e67869fd4ae6419fb9089d7289aec4dd61b9c834db4",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64-gnu": {
@@ -11835,8 +11835,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "ef2fe47be6b147bc376ce8f2949cc3d193c9c1d2e362fa9dcbabf0e7c60f8a19",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "d11f20d2adaa582ac3e3ab6f56a3c1f4e468e1aa4712d6fe76dd2776fdb28330",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64-musl": {
@@ -11851,8 +11851,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "a88306d6b3a09b85f93514d43b2c8bd35dff417cf861bd2a1ead4d87c5666f8a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "a4cfaa4c7915c35ecf4a15a3f25cdda68b1e2de06280cfe98680b4eed3e11ac1",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v2-gnu": {
@@ -11867,8 +11867,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "b8637c81f61f41d49bf95699cc4c295579d671912f81b5446c3ba2496dac2627",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "e040fa65666bd109534c8ed4c70d198954a28e87dffbab1b138a55c8c98c4db5",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v2-musl": {
@@ -11883,8 +11883,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "5dab0c1eb4ce013826a462247629263eae7726b635d868408152444cbf83a778",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "89504b7f5fba85aa2644be63aa9377e69e56f6c6f4c57a96e0a6050e95e2b8d8",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v3-gnu": {
@@ -11899,8 +11899,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "eef2733d40a9511a2af9d83808ad640993c5d8b6fb436bc240cd9bac6be4ffc5",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "5eb9cb98d4528045f1e03373373ddb783fbbf6646e3d0e683fb563e5f1d198e6",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v3-musl": {
@@ -11915,8 +11915,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "9f7fbd3712e13f91414e7a498a58160d8745fa02b9d2898db8f6f3c589920b6d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "0d463ebb5c0886e019c54e07963965ee53c52d01e42b3ca8a994e8599c2d7242",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v4-gnu": {
@@ -11931,8 +11931,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "acf0037e25e80cbc3e8a1ff1e3b83da10ed2b00d8ff7df0ff1d207d896e2225f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "94924bb8ca1f03bf06c87554be2ea50ff8db47f2a3b02c5ff3b27d5a502d5fe4",
"variant": "debug"
},
"cpython-3.12.11+debug-linux-x86_64_v4-musl": {
@@ -11947,8 +11947,8 @@
"minor": 12,
"patch": 11,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "fefe36ed014e3a6baf0eb122161b42262c1a00ae403de18fb03353cf80d46c1f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "47d315cae2b1cd67155cd072410e4a6c0f428e78f09bb5da9ff7eb08480c05c4",
"variant": "debug"
},
"cpython-3.12.10-darwin-aarch64-none": {
@@ -15995,8 +15995,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "f35b94b5aaefaff34b59f4aab09a5eec02c93e3b61a46c6694f4e93fb2aea86c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "cb07230fc0946bab64762b2a97cca278c32c0fa4b1cf5c5c3eb848f08757498a",
"variant": null
},
"cpython-3.11.13-darwin-x86_64-none": {
@@ -16011,8 +16011,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "c2a6b3053af4354d74b70d25ccf744bea7c545ee00da38a93e8b392ec9f062f1",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "1eec204b5dffad8a430c2380fd14895fad2b47406f6d69e07f00b954ffdb8064",
"variant": null
},
"cpython-3.11.13-linux-aarch64-gnu": {
@@ -16027,8 +16027,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "a05521f2fa75e60920cb1172722920262c73d7ead3045a2a5b4844d287a1dfdd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c5155a27d8e8df696eff8c39b1b37e5330f12a764fdf79b5f52ea2deb98a73a0",
"variant": null
},
"cpython-3.11.13-linux-armv7-gnueabi": {
@@ -16043,8 +16043,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "a4bb388a080d1dc4a7d381d2bc7f74d00311d5fc6ef66d457178b5c62d7e0ac1",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "680ecfd9fc09d62dbe68cfb201e567086e3df9a27d061d9bcde78fad4f7f4d94",
"variant": null
},
"cpython-3.11.13-linux-armv7-gnueabihf": {
@@ -16059,8 +16059,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "80444ffb9f33d39a9462e2efa04ba7edbef6af2e957457a71a0710344972f0ba",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "af2508bfab6c90a28d7e271e9c1cede875769556f3537fc7b0e3b6dd1f1c92b7",
"variant": null
},
"cpython-3.11.13-linux-powerpc64le-gnu": {
@@ -16075,8 +16075,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "40e5fcea272e4a8253cf2bc392fbad36ca4260de75a12ef3c95711eb86f57a0c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c83b749e3908140dec9ffadbf6b3f98bacaf4ca2230ead6adbd8a0923eebf362",
"variant": null
},
"cpython-3.11.13-linux-riscv64-gnu": {
@@ -16091,8 +16091,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "eae2bbaf28b1f5886408e6cae4c5d393f3065dbd3293231b93bd0122f5f0543d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "7f0dfc489925e04ba015f170f4f30309330fae711d28bc4ed11ff13b9c3d9443",
"variant": null
},
"cpython-3.11.13-linux-s390x-gnu": {
@@ -16107,8 +16107,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "702fd03db386a6711afbf14778a5b2aca6d4c3e47ff26e85a4d85991023ee0db",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "603e7bad4e81cee7d4c1c9ca3cb5573036fb1d226a9a9634ca0763120740d8ff",
"variant": null
},
"cpython-3.11.13-linux-x86_64-gnu": {
@@ -16123,8 +16123,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "f730f5d09fc41e2573b0092ef143dd8976a8f6593ad31b833ea1d0adbc5562dd",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "e50197b0784baaf2d47c8c8773daa4600b2809330829565e9f31e6cfbc657eae",
"variant": null
},
"cpython-3.11.13-linux-x86_64-musl": {
@@ -16139,8 +16139,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "337e164de474fefe5a2bf63c5d836093eae3532be80ed54b8d1abfd6dcb1b742",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "a233b0492531f187ac33ecfd466debf21537a8b3ae90d799758808d74af09162",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v2-gnu": {
@@ -16155,8 +16155,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "3903459242e57e9979ca6e581c06f3e4c573cf1d3e2d3eb62ce2cba8e3d83fd9",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "5f970ce2eecd824c367132c4fd8d066a0af3d079e46acf972e672588a578b246",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v2-musl": {
@@ -16171,8 +16171,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "624494b5583fcec1f75464797686ffeb4727cf0ccdc54cf9c73f0b45888d5274",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "a2df9657ecbecce2a50f8bb27cb8755d54c478195d49558de1c9c56f5de84033",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v3-gnu": {
@@ -16187,8 +16187,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "d5898a58943ed9f770a94125e7af85fbfd50b87e19135628708e8dbc6c8bd0b4",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c30fd4073a10ac6ee0b8719d106bb6195ca73b7f85340aac6e33069869ae4ee8",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v3-musl": {
@@ -16203,8 +16203,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "8bc18b17a9f8d36271dca160d402c18a42552b0e50708bf3732d0e2b1985235d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "cd15f24848c848b058a41dd0b05c4e5beca692d2c60c962fcb912fffc690afef",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v4-gnu": {
@@ -16219,8 +16219,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "257e29dc405d10062184da4078e1d46a787e19a04cba2a1c1831c21e52d0a557",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "8c390cae0b2d163f18117cae43bcbe430e58146d97e0c39b4afe72842e55f5fc",
"variant": null
},
"cpython-3.11.13-linux-x86_64_v4-musl": {
@@ -16235,8 +16235,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "4b7dec009dbdfb4821aebdb5ca082ac7765ecdb67980dc86adebd57febaf1aec",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "f2ac3addbdf3c08ccf2320bdbed20213b45acd3399d44a990046f09dd883824e",
"variant": null
},
"cpython-3.11.13-windows-aarch64-none": {
@@ -16251,8 +16251,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "d45d2a6009dc50a76e4630c39ea36ba85e51555b7a17e1683d1bcf01c3bf7e1a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "84058f18798534e76f6b9d15b96c41116aad0055e01c6e3ab2ab02db24826b9a",
"variant": null
},
"cpython-3.11.13-windows-i686-none": {
@@ -16267,8 +16267,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "892f215501ae1cfe36e210224f4de106e5825f34f41ad8d458ef73f3012be61f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "8044a253950315481784b9f4764e1025b0d4a7a2760b7a82df849f4667113f80",
"variant": null
},
"cpython-3.11.13-windows-x86_64-none": {
@@ -16283,8 +16283,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "d19baf214caf1ad3d1b34c6931dcd6d915abedd419ba4aecb0cacb7e1ec7884a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "43a574437fb7e11c439e13d84dd094fa25c741d32f9245c5ffc0e5f9523aafa9",
"variant": null
},
"cpython-3.11.13+debug-linux-aarch64-gnu": {
@@ -16299,8 +16299,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "bf9e9c0295634d5ead7d3756651898d6af8d1bfdd8cc410769f9354d3e0871e4",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "b6ca253ced82c9575935a32d327d29dcffa9cb15963b9331c621ac91aa151933",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-armv7-gnueabi": {
@@ -16315,8 +16315,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "c0a5f208bbb1d51dfc3e98919f7856ae3a5643d2e6a6b5edfcbfa7ea41bb822e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "3e02d8ff6b63bb83a9b4cbf428d75c90d06f79df211fa176d291f3864c1e77df",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-armv7-gnueabihf": {
@@ -16331,8 +16331,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "3d091a03c7d5fb47ac6050bffff371ce3904978ca3dc3c49f2bfacdc6b434a1d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "c7f9429f877d9e78a1b7e71c83b2beea38a727f239899ed325b3648e4e4cc1bf",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-powerpc64le-gnu": {
@@ -16347,8 +16347,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "5c2be36a8aa027761b6c5da5bc4bb7ef92c6a8fa70a166f45fcc6f1c8b78330c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "1f47dd100661489bf86befae148ce290009b91a7b62994f087136916ba4cfe4f",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-riscv64-gnu": {
@@ -16363,8 +16363,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "609cd34b0f86f576eec2e55a917d07e4d322e2c58309d6ae2243470207ed369b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "47c5cae609e683e59bf6aff225c06216305b939374476a4cf796d65888a00436",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-s390x-gnu": {
@@ -16379,8 +16379,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "dd849e7e5308066f03d1f2be307cdfd95d5c815aec9dc743bf53c98731005cd5",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "7c16d22e0eeddfec0275f413ccca73c62ba55736230e889e5e78213e456bae1c",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64-gnu": {
@@ -16395,8 +16395,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "6ed2ab536fce32ba93ddf3ea572c92aee3a5c12575f9096defbab858011a9810",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "22b0309a7232568c054790a23979f490143c2a65f5b4638b52ebfa2e02ad7b20",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64-musl": {
@@ -16411,8 +16411,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "a4df9df180fa29800467eef491b3d22019aec3eca8160f9babd27b24cf6ebf39",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "6a3c83db95e39a68ace7515787be03e77993f023bb0c908eaed4cf79480f24d4",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v2-gnu": {
@@ -16427,8 +16427,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "b27f28286c97e589521c496fe327e940c5ab99a406d652fe470008c2a525a159",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "0d7a5be35f70db94f151656a912fd66e0c001c515969007906b3f97c3fe46364",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v2-musl": {
@@ -16443,8 +16443,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "9ffcf6f5b69805c47fb39c43810030cf1ff0fefab4b858734da75130f2184f7e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "7c4ae94fe3f488027f1a97f304ef4dbe2d83f4b97381b5d6dd5552ce01065027",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v3-gnu": {
@@ -16459,8 +16459,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "c546e8dc6d21eb9e3fc8a849b67fe5564ebd69456c800e1e9ba685a6450e1db3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "5fec7d7868079bd9107c190a3187d3bffe8e3a0214d09f8ce7fbe02788f6030d",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v3-musl": {
@@ -16475,8 +16475,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "190734e9714c4041a160d50240a1e5489fd416091bb2f4f0ae1e17e46a67f641",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "ac5f52aca1051354e336448634b8e544476198d1f8db73f0bcd6dff64267cf9e",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v4-gnu": {
@@ -16491,8 +16491,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "629c39a382faed464041836b9299a2f3159e3cc5d07844f5cb5be8d579898166",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "467cee90b4081db0ddfef98e213bf9b69355068c2899853c7cf38bea44661fd5",
"variant": "debug"
},
"cpython-3.11.13+debug-linux-x86_64_v4-musl": {
@@ -16507,8 +16507,8 @@
"minor": 11,
"patch": 13,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "7909d1992f8bc7346b081f46a0d4c37e7ccabd041a947d89c17caa1cc497007b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "1ac6812cca22b1d3c70b932d5f6f6da0bc693a532e78132661f856bafcd40e2b",
"variant": "debug"
},
"cpython-3.11.12-darwin-aarch64-none": {
@@ -20299,8 +20299,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "5076f23af532e6225b85106393a092c1e43c67605f5038a2687efe2608e999b0",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "73939b9c93d50163cd0f1af8b3ce751c941a3a8d6eba9c08edcc9235dc5888c7",
"variant": null
},
"cpython-3.10.18-darwin-x86_64-none": {
@@ -20315,8 +20315,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "8e9436c3aec957de1e79fd670b7c7801ad59f174a178a7e92964e4642ade8eda",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "1ba1523d81d042a516068b98ded99d3490d3f4bb6c214fc468b62dadde88e5ac",
"variant": null
},
"cpython-3.10.18-linux-aarch64-gnu": {
@@ -20331,8 +20331,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "9e7581dc4e6e75135650551040d1ad9529bb1b7b2b6c2dbf9b80483507284a50",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "54c490a7f22ac03171334e5265081ca90d75ca0525b154b001f0ee96ad961c18",
"variant": null
},
"cpython-3.10.18-linux-armv7-gnueabi": {
@@ -20347,8 +20347,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "973db52fb00257045a4d3ea13c59c50588bc6f708b0a0230a2adb2154f710009",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "56ca1369651cb56221053676d206aa675ee91ddad5de71cb8de7e357f213ff59",
"variant": null
},
"cpython-3.10.18-linux-armv7-gnueabihf": {
@@ -20363,8 +20363,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "87368650aa19e173da8b365231f75f1584f2d9e8b95d763b9c47f7fc053a644a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "eacff45758c90b3cdd4456a31b1217d665e122df8b5a0b8b238efcc59b8d8867",
"variant": null
},
"cpython-3.10.18-linux-powerpc64le-gnu": {
@@ -20379,8 +20379,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "cc3079246949bcef9be0118f58e6713fc8af2ba49927db015bc6f4d8fca6ab26",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "6e4180591050ec321a76ac278f9eab9c80017136293ce965229f3cbea3a1a855",
"variant": null
},
"cpython-3.10.18-linux-riscv64-gnu": {
@@ -20395,8 +20395,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "73c6d8cf8eb865595ef232f5bb7d7a55cb0c861e2ee72a6b23e61409010bf6ee",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "ef176d45d3199989df3563e8a578fb00084190fa139ecc752debdee7d9acc77d",
"variant": null
},
"cpython-3.10.18-linux-s390x-gnu": {
@@ -20411,8 +20411,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "668f8d911eec50bdd36996f3c0c098255fd90360e83d73efc383c136a93cbd30",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "f744cbebf0cc0236fd234aa99ae799105ed2edb0a01cf3fe9991d6dd85bd157c",
"variant": null
},
"cpython-3.10.18-linux-x86_64-gnu": {
@@ -20427,8 +20427,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "c6e79f2c78b893339c4fbb4f337647f5e14d491ca2c05ecec8f78187bfd9480c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "ba282bc7e494c38c7f5483437fd1108e1d55f0b24effb3eb5b28e03966667d7c",
"variant": null
},
"cpython-3.10.18-linux-x86_64-musl": {
@@ -20443,8 +20443,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "cb6f4ea6cb5eef904d5a8fb4bcfee77bc34bca4946f8a12bab70c103f503f676",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "0502186e5ccc85134a2c7d11913198eb5319477da1702deb5d4b89c3f692b166",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v2-gnu": {
@@ -20459,8 +20459,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "dbc05eadb1cdf504718688bb29367ab16fc0868c3b873031ea49b85e919a3bee",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "ddd7ff4a13131c29011dd508d2f398c95977dc5c055be891835a3aa12df7acfa",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v2-musl": {
@@ -20475,8 +20475,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "5c7ac0653d42d1ab391fec12c1f1f1d940c7ebe20013979d91d4651c3fcb62b9",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "feb3d0c6ddfa959948321d6ac3de32d5cde32fe50135862c65165c9415cafedf",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v3-gnu": {
@@ -20491,8 +20491,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "1199924aba81e7475479b9e709e91f5cbb5cf3dc269cc0c30c27cf25cbfe8f01",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "69c634bf5c979ca3d6fac7e5a34613915e55fc6671bfb0dee7470f3960a649ee",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v3-musl": {
@@ -20507,8 +20507,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "66a78c15f1f2cd0cfd0196edf323bdffe77481e6904751e125d4db23db78bad0",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "dbe2e101bb60277ef0f9354b7f0b1aaa85b07dec3a12ca72ae133baa080deeca",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v4-gnu": {
@@ -20523,8 +20523,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "7c0aaa49f3a5b15689ae43d6cd4f418732ee95070aaa96dabf968bb3ac45b29e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "a6b2530a580061eb9d08168ac5e8808b8df1d2e7b8dd683c424b59cc9124a3a2",
"variant": null
},
"cpython-3.10.18-linux-x86_64_v4-musl": {
@@ -20539,8 +20539,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "b14649f4bdb22cf8b2c3656034687b9854f0ad0489018a65a1d44e886a000e96",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "3a2abc86a8e740d4e7dddcd697781630d9d9e6ce538095b43a4789a531f8239b",
"variant": null
},
"cpython-3.10.18-windows-i686-none": {
@@ -20555,8 +20555,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "e633c5093644502c477ba2391bde9bf23fb5d695aaa7de0e727b363592d81edf",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "1326fb50a7f39ff80b338a95c47acbeda30f484ee28ff168c3e395320345ee01",
"variant": null
},
"cpython-3.10.18-windows-x86_64-none": {
@@ -20571,8 +20571,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "9b168333744e676d221d0e47b73328e38a78a080bbeff009db72d0eae201a3a7",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "0dec10054eefa76d4e47e8f53d9993e51a6d76252d9f8e5162b1b9805e6ffc20",
"variant": null
},
"cpython-3.10.18+debug-linux-aarch64-gnu": {
@@ -20587,8 +20587,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "2997824229577882eb7f0000118c93d0fb12f97bee10bd7c41ed46b7123c6d5d",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "ed4d68544efef0d7c158c4464d8e3b4407a02e2ea014e76dfa65fddfd49384af",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-armv7-gnueabi": {
@@ -20603,8 +20603,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "5650962a60d540d9a71b6af917f78386ae69f4368f9b3537828b8368400aee8f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "39fdc60b2645262ef658ebbf5edfaffd655524855d3aa35bfb05a149a271e4f5",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-armv7-gnueabihf": {
@@ -20619,8 +20619,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "891540ab2a6e2534115787c95e06111176c2630dc261bad2169251924ec41fc6",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "cf0c02ab4b46c9b6a0854e5bd9da9b322d8d91ae5803190b798ff15cb25ab153",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-powerpc64le-gnu": {
@@ -20635,8 +20635,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "7266278b47151f48b7b57790cda43aeb12bb1a776711fbb552a60ace2d9e68fc",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "e9f346d7fa001e85cea92cf027b924c2095d54f7db297287b2df550f04e6c304",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-riscv64-gnu": {
@@ -20651,8 +20651,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "ba07bece860b8f98da3740860f4e91de18d0e05a30f1970203f0d5f98489210c",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "c11eba8055c7bb643f55694fb1828d8d13e4ade2cb3ec60d8d9bb38fbf7500d8",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-s390x-gnu": {
@@ -20667,8 +20667,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "217a35c1c9ef9bfef37970587245ce06c3e63f92322b083e0baa7da2a82587cf",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "c7b407062dc86e011c2e3d8f5f0e1db8d8eac3124e4d0b597f561d7f7b2a8723",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64-gnu": {
@@ -20683,8 +20683,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "1d485c1882d0ecefe858ef8db3864fb6b91a938941f3d7350c06f3b6a03734db",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "1ba2a0159629d92207966cbf2038774afd0f78cc59e94efb8a86e88a32563bdd",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64-musl": {
@@ -20699,8 +20699,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "cdbead37d85fff493e6eb3e6adf3d6935a721315b4711666db56d157e796396b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "ebee02e3380e50e394962697dc4d4c845f60ac356da88f671be563ef0dafaa9b",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v2-gnu": {
@@ -20715,8 +20715,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "5ae93dac6ae65c7f13c355ce1fe28b78a0a9b272c428bb27f5dbf2a357275bc2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "4de984931af2c4a2b18139ff123843671c5037900524065c2fef26ff3d1a5771",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v2-musl": {
@@ -20731,8 +20731,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "a588754cd0e959123c5beedd1d50cc849f8c3bed4908174a6f55730951a10241",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "fd97d5565e0fb98ad78db65f107789e287f84c53f4d9f3ccb37fdd5f3849288b",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v3-gnu": {
@@ -20747,8 +20747,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "65976255591b39e428ae750050e398521a32bcdefb96053dd2cf9007165411da",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "ea450da681ab3fdef0da5181d90ebff7331ce1f7f827bb3b56657badc4127fad",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v3-musl": {
@@ -20763,8 +20763,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "fc8ba366396b3e6b5aca7e3ba449ad094350a533f31a0c99c6ed1ac0d41ef7d2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "ff9fe8b880460ce9529db369e2becca20a7e6a042df2deba2277e35c5cdcd35a",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v4-gnu": {
@@ -20779,8 +20779,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "2bf6024c48b82b667dc3bab77d9ff143ac3983e75be94c32cdc22b9cd7e50d15",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "c1a1d9661cf1d45096478fefd1e70ff6d0cbc419194cf094414d24fa336f5116",
"variant": "debug"
},
"cpython-3.10.18+debug-linux-x86_64_v4-musl": {
@@ -20795,8 +20795,8 @@
"minor": 10,
"patch": 18,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "41696205b706ea5b0ef89eefd695bfe87f44dae57f9318711892b1ceb144cff7",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "2bf809a85ffc45a37b32d5107f1a3ee8a6d12f07bb5fd3ad26ba16501418a8a7",
"variant": "debug"
},
"cpython-3.10.17-darwin-aarch64-none": {
@@ -25739,8 +25739,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "aff1156fa5be26caf1ac2d4029936eb9379dc4351bb1d32d2120b10f2ba61747",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "3ab0d1885fee62dadc1123f0b23814e51b6abe5dcf6182a0c9af6cfc69764741",
"variant": null
},
"cpython-3.9.23-darwin-x86_64-none": {
@@ -25755,8 +25755,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz",
- "sha256": "9de5325065b159e3e7daa53c133126df6b3eeed2316176d84e7761b01d16ba7f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz",
+ "sha256": "0fbb8bcc5d203b83ba1e63f9b8b1debe9162c22dd0f7481543f310b298255d6a",
"variant": null
},
"cpython-3.9.23-linux-aarch64-gnu": {
@@ -25771,8 +25771,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "51fe6b026253b9f9c83205d1907572d7618ea47216e40a351d30eaa55f879c3e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "da2e4a73d7318241031d87da2acb7da99070f94d715b8c9f8c973a5d586b20a6",
"variant": null
},
"cpython-3.9.23-linux-armv7-gnueabi": {
@@ -25787,8 +25787,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
- "sha256": "1faeec85e15cd17acb90683bc42cc8bccdb5250816501863d3407713deb6215e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz",
+ "sha256": "41599a37d0f6fa48b44183d15a7c98a299839b83fa28774ff3f01d28500da9a6",
"variant": null
},
"cpython-3.9.23-linux-armv7-gnueabihf": {
@@ -25803,8 +25803,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
- "sha256": "08261e7a2328c989409a7f0f4574bfca84adfab7e5db6556209642ebba55de5e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz",
+ "sha256": "2263daa7d9cda3e53449091dc86aa7931409721031bad1a1a160b214777c5cd6",
"variant": null
},
"cpython-3.9.23-linux-powerpc64le-gnu": {
@@ -25819,8 +25819,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "2ab4c6c616b23b2220829420028f90d0aa4f767ae60fcdf5d2edff08644bb5af",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "fc068ac5cf5e4effc74e2b63e34c2618e5a838737a19ca8f7f17cc2f10e44f26",
"variant": null
},
"cpython-3.9.23-linux-riscv64-gnu": {
@@ -25835,8 +25835,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "bea6c21421b016ca03e786f0fb91a03cc9d3f39aa8069785632efe3666e90df5",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "5475f1106abed1b1163fa7964f8f8e834cbdafc26ddb9ab79cc5c10fb8110457",
"variant": null
},
"cpython-3.9.23-linux-s390x-gnu": {
@@ -25851,8 +25851,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "6715a5b8af51e76929c1f7a81c9085053243d2b4025bac29f8ec18301766d795",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "2d571c79b0722488b4980badb163ebd83e48b02b5a125239c67239df8dd37476",
"variant": null
},
"cpython-3.9.23-linux-x86_64-gnu": {
@@ -25867,8 +25867,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "ad39b79d0168f0f7cc5dbe14d99ff8d1068077f15cc2b03456fe3364630157e8",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "7932256affbd8fe7e055fb54715dae47e4557919bfe84bb8f33260a7a792633a",
"variant": null
},
"cpython-3.9.23-linux-x86_64-musl": {
@@ -25883,8 +25883,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "977af02740232123c385e7f8e70eb8acdcf8ffd4126526f9d3d8cb1bd20fd669",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "64c4bb8c76b50f264a6900f3391156efd0c39ad75447f1b561aa0b150069e361",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v2-gnu": {
@@ -25899,8 +25899,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "ffbb92f9213591ab7b253c89d34218c3adab25327668b89bc6120038cc2b0a37",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "c2bdab1548c60ed0bda4c69bea6dd17569c1d681065ed5ec5395175ed165f47a",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v2-musl": {
@@ -25915,8 +25915,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "e53121074856e6ef4e8f3a865c2848d4287431a1d0ceef21fd389cc39649f917",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "61b59f2c19575acd088e1d63ca95e810e8e2b1af20f37d7acebf90f864c22ca4",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v3-gnu": {
@@ -25931,8 +25931,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "1856f202d42555e8e8709db0291bbfac5a896724734314746ef20c014cca8552",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "f791037703a7370783c853bb406034532599ff561dfbf5bc67d44323d131b3c3",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v3-musl": {
@@ -25947,8 +25947,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "94f94fa20477b5088a147936c565c2b0a5a18e353d954ad6bbd5048e933d9a67",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "88c3ad43158942c232039752e4d269cd89e282795e4c7f863f76f3e307b852f4",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v4-gnu": {
@@ -25963,8 +25963,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
- "sha256": "55209fe80fac7837837c5b4d310e71e1de822ca413465bf7589fabae5dd9ba7a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz",
+ "sha256": "0a71dcb46a9ff949f7672f65090d210ee79d80846f10629e3f234eb7f5fe58e8",
"variant": null
},
"cpython-3.9.23-linux-x86_64_v4-musl": {
@@ -25979,8 +25979,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
- "sha256": "41e1237774abf02a8c3b33c365d959ba8529f6a845d93789e3fe7ba4203fb8c2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz",
+ "sha256": "cd574a9a36a729aa964e1c52bb3084a36350d905c4d16427d85dd3f80e1b3dcd",
"variant": null
},
"cpython-3.9.23-windows-i686-none": {
@@ -25995,8 +25995,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "f8d558d6d260cc970f02e04f5b6555acd5148b1b2bef25d2c945ab2b8dfd3ce2",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "f5b6a6185ed80463160cbd95e520d8d741873736d816ac314d3e08d61f4df222",
"variant": null
},
"cpython-3.9.23-windows-x86_64-none": {
@@ -26011,8 +26011,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
- "sha256": "3a150e1126b1b7645a95ba06992d886cd03dab524d7c2660bd94bcf51f499fa1",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz",
+ "sha256": "a8f80f8da7901fba2b271cdc5351a79b3d12fd95ee50cc4fe78410dc693eb150",
"variant": null
},
"cpython-3.9.23+debug-linux-aarch64-gnu": {
@@ -26027,8 +26027,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "51cfb2db5abdd1e10d2998289fbf3235352a61b4b6a3ef8ac4fbf4252ae09c78",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "c00ba3d83356c187e39c9d6b1541733299a675663690dc1b49c62a152d2db191",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-armv7-gnueabi": {
@@ -26043,8 +26043,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
- "sha256": "369a0f68be191dbb45a3ca173c9589d77f973be3552f08225d03f5e013795d25",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst",
+ "sha256": "eb4875c6220036fd1b40af4d885823057122d61fc60f0b2c364065259adad0cc",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-armv7-gnueabihf": {
@@ -26059,8 +26059,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
- "sha256": "0821af742c0187823ae3194c53b7590e7bf0524a14b94580300391e0b13bdd8a",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst",
+ "sha256": "eca68cac8c0880f08de5c1bcae91ff0bd7fe64e5788a433fc182a5e037af671c",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-powerpc64le-gnu": {
@@ -26075,8 +26075,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "45525a2d123981cb56f5fe4cd87e9bbe18c3fffe6b778313e8ef76f864315513",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "5ffc8d84b6098cfa5e2e3aaedcc3e130809d5caa1958d5155995ed3df15d8cc7",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-riscv64-gnu": {
@@ -26091,8 +26091,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "9280d5f805d1f1ff992657af852a343f90cdaf7ef40287b55f48a73e409a4fe3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "d7f38d5539d7a0b15ce6071ba3290ce1a4ac2da3bd490d023b4d7b36c6c33c89",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-s390x-gnu": {
@@ -26107,8 +26107,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "340c153709d2d428d0604802983bd017079ea95f48ccbb8877e08c87b8c93f4f",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "14250195a8c4c42fa9b22e7ca70ac5be3fe5e0ca81239c0672043eddeb6bb96e",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64-gnu": {
@@ -26123,8 +26123,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "e63909ea5cf383db126d5af9c3ba09fc68868104cf8db265723ad1220a5fafae",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "846ad94f04ca8413762e6cfaee752156bbaa75f3ec030bcc235453f708e3577c",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64-musl": {
@@ -26139,8 +26139,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "1f58c434a2772e136506e517e412cc450359807a32742064d9ef3ec18ae1ef3e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "4ef30683e0dd6a08a6ef591ab37a218baa42a7352f5c3951131538ab0ef83865",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v2-gnu": {
@@ -26155,8 +26155,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "6702268ce25da3f547ed1f48ee20144d0cdc1db967a467f25d097f43cb52a25e",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "8964daf898c112bc5caa9499e8d1ba4c0d82911b4c3e07044c7f5abf489b97c6",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v2-musl": {
@@ -26171,8 +26171,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "606eeb49821a06fb874527494f6493606e5f837cf56dba8235e75149ec53297b",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "868f2f3e994992a1b68eb051fa2678a2e57bbbe1fcfc9f48461b0d2d87c5b6a8",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v3-gnu": {
@@ -26187,8 +26187,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "11dcf8d92a18e609f32750ceb758a65855505a79907302142c8b70785c5c9a03",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "1616c6f535b6edf4160ee97b9beca8146f9cd77a4de8c240a0a3f095a09795e9",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v3-musl": {
@@ -26203,8 +26203,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "d246a1a69cee5ec4bf467fb1ea42f6218925d3047afd3817b34fc3f8ad199200",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "1f9d7987734042d04badc60686f5503eb373ea8b7b7f3ade6a58a37f7d808265",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v4-gnu": {
@@ -26219,8 +26219,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
- "sha256": "05b81fde271d35e97d5e411a2d9e232baa424a55c8ea6e09a15e1606c08833f4",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst",
+ "sha256": "4b8f925b20b6b74c1eb48fa869ee79cde20745fb93c83776e5c71924448e7e53",
"variant": "debug"
},
"cpython-3.9.23+debug-linux-x86_64_v4-musl": {
@@ -26235,8 +26235,8 @@
"minor": 9,
"patch": 23,
"prerelease": "",
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
- "sha256": "33e7411e88033865e8a4e9c995112cb3867f284102624b3ce1dbcdb4f4c03ea3",
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst",
+ "sha256": "ecab1905698e5dd4a11c46a1dc6be49cf0e37f70b81191adbb7dad6e453906cb",
"variant": "debug"
},
"cpython-3.9.22-darwin-aarch64-none": {
From 4175e3eb4d7e484004d3eba6f0ecaded5810e03a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Sun, 13 Jul 2025 08:20:51 -0500
Subject: [PATCH 015/130] Sync latest Python releases (#14581)
Automated update for Python releases.
Co-authored-by: zanieb <2586601+zanieb@users.noreply.github.com>
---
crates/uv-dev/src/generate_sysconfig_mappings.rs | 4 ++--
crates/uv-python/src/sysconfig/generated_mappings.rs | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/crates/uv-dev/src/generate_sysconfig_mappings.rs b/crates/uv-dev/src/generate_sysconfig_mappings.rs
index b9f58dd92..8357ee7fb 100644
--- a/crates/uv-dev/src/generate_sysconfig_mappings.rs
+++ b/crates/uv-dev/src/generate_sysconfig_mappings.rs
@@ -11,7 +11,7 @@ use crate::ROOT_DIR;
use crate::generate_all::Mode;
/// Contains current supported targets
-const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250708/cpython-unix/targets.yml";
+const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250712/cpython-unix/targets.yml";
#[derive(clap::Args)]
pub(crate) struct Args {
@@ -130,7 +130,7 @@ async fn generate() -> Result {
output.push_str("//! DO NOT EDIT\n");
output.push_str("//!\n");
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
- output.push_str("//! Targets from \n");
+ output.push_str("//! Targets from \n");
output.push_str("//!\n");
// Disable clippy/fmt
diff --git a/crates/uv-python/src/sysconfig/generated_mappings.rs b/crates/uv-python/src/sysconfig/generated_mappings.rs
index 54170aba5..646501b07 100644
--- a/crates/uv-python/src/sysconfig/generated_mappings.rs
+++ b/crates/uv-python/src/sysconfig/generated_mappings.rs
@@ -1,7 +1,7 @@
//! DO NOT EDIT
//!
//! Generated with `cargo run dev generate-sysconfig-metadata`
-//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250708/cpython-unix/targets.yml>
+//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250712/cpython-unix/targets.yml>
//!
#![allow(clippy::all)]
#![cfg_attr(any(), rustfmt::skip)]
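The two hunks in this patch only bump a pinned release tag: the fetch URL in `generate_sysconfig_mappings.rs` and the doc-comment header it writes into `generated_mappings.rs`. As a rough illustration (names and structure are simplified, not uv's exact code), the header-emission step amounts to formatting that pinned tag into the `//!` lines:

```rust
// Illustrative sketch of the header-emission step behind
// `cargo run dev generate-sysconfig-metadata`; `RELEASE_TAG` is a hypothetical
// name, the real code pins full dated URLs.
const RELEASE_TAG: &str = "20250712";

fn render_header() -> String {
    let mut output = String::new();
    output.push_str("//! DO NOT EDIT\n");
    output.push_str("//!\n");
    output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
    output.push_str(&format!(
        "//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/{RELEASE_TAG}/cpython-unix/targets.yml>\n"
    ));
    output.push_str("//!\n");
    output
}

fn main() {
    print!("{}", render_header());
}
```

Keeping the tag in one place is what lets the automated release-sync patch stay this small.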
From a57241c0d7b7412a3f2f7c9fadbb0bf55c50daea Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Sun, 13 Jul 2025 21:10:28 -0400
Subject: [PATCH 016/130] Update pre-commit hook astral-sh/ruff-pre-commit to
v0.12.3 (#14592)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1c8965c0f..5476c9dc8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -42,7 +42,7 @@ repos:
types_or: [yaml, json5]
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.12.2
+ rev: v0.12.3
hooks:
- id: ruff-format
- id: ruff
From e9509fde84ea84ea8d588c5a47eff1810e6b2f43 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 15:42:56 +0200
Subject: [PATCH 017/130] Update Rust crate clap to v4.5.41 (#14593)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.40` -> `4.5.41` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency Dashboard for more information.
---
### Release Notes
clap-rs/clap (clap)
### [`v4.5.41`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4541---2025-07-09)
[Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.40...v4.5.41)
##### Features
- Add `Styles::context` and `Styles::context_value` to customize the
styling of `[default: value]` like notes in the `--help`
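
As a hedged sketch of how the new setters might be used: the method names come from the changelog above, while the exact signatures (taking a `styling::Style`, like the existing `Styles` setters) are assumptions, not verified against the release.

```rust
use clap::builder::styling::Style;
use clap::builder::Styles;
use clap::{Arg, Command};

// Hedged sketch: style the `[default: info]` context note in `--help` using the
// `Styles::context` / `Styles::context_value` setters added in 4.5.41.
fn styled_command() -> Command {
    let styles = Styles::styled()
        .context(Style::new().dimmed())
        .context_value(Style::new().bold());
    Command::new("demo")
        .styles(styles)
        .arg(Arg::new("level").long("level").default_value("info"))
}

fn main() {
    // Prints the help text with the customized context styling.
    styled_command().print_help().ok();
}
```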
---
### Configuration
📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC),
Automerge - At any time (no schedule defined).
🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.
♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.
🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.
---
- [ ] If you want to rebase/retry this PR, check
this box
---
This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/astral-sh/uv).
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
Cargo.lock | 30 +++++++++++++++---------------
1 file changed, 15 insertions(+), 15 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index bc42e30af..4079390e9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -619,9 +619,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.40"
+version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
+checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
"clap_derive",
@@ -629,9 +629,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.40"
+version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
+checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstream",
"anstyle",
@@ -672,9 +672,9 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "4.5.40"
+version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
+checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
dependencies = [
"heck",
"proc-macro2",
@@ -761,7 +761,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1138,7 +1138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1966,7 +1966,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37"
dependencies = [
"hermit-abi 0.4.0",
"libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -2026,7 +2026,7 @@ dependencies = [
"portable-atomic",
"portable-atomic-util",
"serde",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -2885,7 +2885,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -3317,7 +3317,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.4.15",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -3330,7 +3330,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.9.2",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -3913,7 +3913,7 @@ dependencies = [
"getrandom 0.3.1",
"once_cell",
"rustix 1.0.7",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -6293,7 +6293,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
]
[[package]]
From 4c40dd341e68dfd69073cb4e7cc36cf1dcdb04c7 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 15:45:56 +0200
Subject: [PATCH 018/130] Update Rust crate hyper-util to v0.1.15 (#14595)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [hyper-util](https://hyper.rs) ([source](https://redirect.github.com/hyperium/hyper-util)) | dev-dependencies | patch | `0.1.14` -> `0.1.15` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency Dashboard for more information.
---
### Release Notes
hyperium/hyper-util (hyper-util)
### [`v0.1.15`](https://redirect.github.com/hyperium/hyper-util/blob/HEAD/CHANGELOG.md#0115-2025-07-07)
[Compare Source](https://redirect.github.com/hyperium/hyper-util/compare/v0.1.14...v0.1.15)
- Add header casing options to `auto::Builder`.
- Fix `proxy::Socksv5` to check for enough bytes before parsing ipv6
responses.
- Fix including `client-proxy` in the `full` feature set.
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
Cargo.lock | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 4079390e9..f2ef0b4a3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1703,9 +1703,9 @@ dependencies = [
[[package]]
name = "hyper-util"
-version = "0.1.14"
+version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
+checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df"
dependencies = [
"base64 0.22.1",
"bytes",
From ef7ab76206e96bbc531a5ee9c065f6a2462cb651 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 15:48:47 +0200
Subject: [PATCH 019/130] Update Rust crate codspeed-criterion-compat to v3.0.3
(#14594)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [codspeed-criterion-compat](https://codspeed.io) ([source](https://redirect.github.com/CodSpeedHQ/codspeed-rust)) | dependencies | patch | `3.0.2` -> `3.0.3` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency
Dashboard for more information.
---
### Release Notes
CodSpeedHQ/codspeed-rust (codspeed-criterion-compat)
### [`v3.0.3`](https://redirect.github.com/CodSpeedHQ/codspeed-rust/releases/tag/v3.0.3)
[Compare Source](https://redirect.github.com/CodSpeedHQ/codspeed-rust/compare/v3.0.2...v3.0.3)
#### What's Changed
- tests: cargo-bench should work with the compat layers by
[@art049](https://redirect.github.com/art049) in
[https://github.com/CodSpeedHQ/codspeed-rust/pull/110](https://redirect.github.com/CodSpeedHQ/codspeed-rust/pull/110)
- fix: handle rustflags from .cargo/config.toml by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in
[https://github.com/CodSpeedHQ/codspeed-rust/pull/109](https://redirect.github.com/CodSpeedHQ/codspeed-rust/pull/109)
**Full Changelog**:
https://github.com/CodSpeedHQ/codspeed-rust/compare/v3.0.2...v3.0.3
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
Cargo.lock | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index f2ef0b4a3..53bd78f42 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -690,9 +690,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "codspeed"
-version = "3.0.2"
+version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf"
+checksum = "a7524e02ff6173bc143d9abc01b518711b77addb60de871bbe5686843f88fb48"
dependencies = [
"anyhow",
"bincode",
@@ -708,9 +708,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat"
-version = "3.0.2"
+version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a"
+checksum = "2f71662331c4f854131a42b95055f3f8cbca53640348985f699635b1f96d8c26"
dependencies = [
"codspeed",
"codspeed-criterion-compat-walltime",
@@ -719,9 +719,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat-walltime"
-version = "3.0.2"
+version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64"
+checksum = "e3c9bd9e895e0aa263d139a8b5f58a4ea4abb86d5982ec7f58d3c7b8465c1e01"
dependencies = [
"anes",
"cast",
From d179c496ddbbc2c8434608dc60da1bbd9f18c30b Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 15:49:11 +0200
Subject: [PATCH 020/130] Update Rust crate spdx to v0.10.9 (#14596)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [spdx](https://redirect.github.com/EmbarkStudios/spdx) | workspace.dependencies | patch | `0.10.8` -> `0.10.9` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency
Dashboard for more information.
---
### Release Notes
EmbarkStudios/spdx (spdx)
### [`v0.10.9`](https://redirect.github.com/EmbarkStudios/spdx/blob/HEAD/CHANGELOG.md#0109---2025-07-12)
[Compare Source](https://redirect.github.com/EmbarkStudios/spdx/compare/0.10.8...0.10.9)
##### Changed
- [PR#74](https://redirect.github.com/EmbarkStudios/spdx/pull/76) update
SPDX license list to 3.27.0.
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
Cargo.lock | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 53bd78f42..5dbdfaf65 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3729,9 +3729,9 @@ dependencies = [
[[package]]
name = "spdx"
-version = "0.10.8"
+version = "0.10.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193"
+checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3"
dependencies = [
"smallvec",
]
From 9efd053d27882cdb763057f368b5f85b7448af67 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Mon, 14 Jul 2025 08:56:39 -0500
Subject: [PATCH 021/130] Add test case for `uv tool` Python re-resolves
(#14605)
A test case for https://github.com/astral-sh/uv/pull/10401 and
https://github.com/astral-sh/uv/pull/14606
---
crates/uv/tests/it/tool_run.rs | 70 ++++++++++++++++++++++++++++++++++
1 file changed, 70 insertions(+)
diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs
index fb6287454..8bcf5c3d1 100644
--- a/crates/uv/tests/it/tool_run.rs
+++ b/crates/uv/tests/it/tool_run.rs
@@ -2975,3 +2975,73 @@ fn tool_run_windows_runnable_types() -> anyhow::Result<()> {
Ok(())
}
+
+#[test]
+fn tool_run_reresolve_python() -> anyhow::Result<()> {
+ let context = TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_counts();
+ let tool_dir = context.temp_dir.child("tools");
+ let bin_dir = context.temp_dir.child("bin");
+ let foo_dir = context.temp_dir.child("foo");
+ let foo_pyproject_toml = foo_dir.child("pyproject.toml");
+
+ foo_pyproject_toml.write_str(indoc! { r#"
+ [project]
+ name = "foo"
+ version = "1.0.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [project.scripts]
+ foo = "foo:run"
+ "#
+ })?;
+ let foo_project_src = foo_dir.child("src");
+ let foo_module = foo_project_src.child("foo");
+ let foo_init = foo_module.child("__init__.py");
+ foo_init.write_str(indoc! { r#"
+ import sys
+
+ def run():
+ print(".".join(str(key) for key in sys.version_info[:2]))
+ "#
+ })?;
+
+ // Although 3.11 is first on the path, we'll re-resolve with 3.12 because the `requires-python`
+ // is not compatible with 3.11.
+ uv_snapshot!(context.filters(), context.tool_run()
+ .arg("--from")
+ .arg("./foo")
+ .arg("foo")
+ .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
+ .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ 3.12
+
+ ----- stderr -----
+ Resolved [N] packages in [TIME]
+ Prepared [N] packages in [TIME]
+ Installed [N] packages in [TIME]
+ + foo==1.0.0 (from file://[TEMP_DIR]/foo)
+ ");
+
+ uv_snapshot!(context.filters(), context.tool_run()
+ .arg("--from")
+ .arg("./foo")
+ .arg("--python")
+ .arg("3.11")
+ .arg("foo")
+ .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
+ .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ 3.12
+
+ ----- stderr -----
+ Resolved [N] packages in [TIME]
+ ");
+
+ Ok(())
+}
From 3b050b554519f2bbe94a5987aaa87d097dfca5b5 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 09:58:55 -0400
Subject: [PATCH 022/130] Update Rust crate tokio to v1.46.1 (#14599)
---
Cargo.lock | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 5dbdfaf65..8232c6905 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1942,6 +1942,17 @@ dependencies = [
"similar",
]
+[[package]]
+name = "io-uring"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
+dependencies = [
+ "bitflags 2.9.1",
+ "cfg-if",
+ "libc",
+]
+
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -4134,17 +4145,19 @@ source = "git+https://github.com/astral-sh/tl.git?rev=6e25b2ee2513d75385101a8ff9
[[package]]
name = "tokio"
-version = "1.45.1"
+version = "1.46.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
+checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
dependencies = [
"backtrace",
"bytes",
+ "io-uring",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
+ "slab",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
From 0af025eafbf24215898628d7682f8c787356868d Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 08:59:09 -0500
Subject: [PATCH 023/130] Update CodSpeedHQ/action action to v3.7.0 (#14597)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [CodSpeedHQ/action](https://redirect.github.com/CodSpeedHQ/action) | action | minor | `v3.5.0` -> `v3.7.0` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency
Dashboard for more information.
---
### Release Notes
CodSpeedHQ/action (CodSpeedHQ/action)
### [`v3.7.0`](https://redirect.github.com/CodSpeedHQ/action/releases/tag/v3.7.0)
[Compare Source](https://redirect.github.com/CodSpeedHQ/action/compare/v3.6.1...v3.7.0)
#### What's Changed
##### 🚀 Features
- Add pre- and post-benchmark scripts by
[@not-matthias](https://redirect.github.com/not-matthias)
- Add cli args for perf by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#94](https://redirect.github.com/CodSpeedHQ/runner/pull/94)
##### 🐛 Bug Fixes
- Forward environment to systemd-run cmd by
[@not-matthias](https://redirect.github.com/not-matthias)
- Only panic in upload for non-existing integration by
[@not-matthias](https://redirect.github.com/not-matthias)
- Multi-line commands in valgrind by
[@not-matthias](https://redirect.github.com/not-matthias)
- Symlink libpython doesn't work for statically linked python by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#89](https://redirect.github.com/CodSpeedHQ/runner/pull/89)
- Run perf with sudo; support systemd-run for non-perf walltime by
[@not-matthias](https://redirect.github.com/not-matthias)
- Use correct path for unwind info by
[@not-matthias](https://redirect.github.com/not-matthias)
##### ⚙️ Internals
- Add executor tests by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#95](https://redirect.github.com/CodSpeedHQ/runner/pull/95)
- Add log to detect invalid origin url by
[@not-matthias](https://redirect.github.com/not-matthias)
- Upgrade to edition 2024 by
[@not-matthias](https://redirect.github.com/not-matthias)
- Add debug logs for proc maps by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#88](https://redirect.github.com/CodSpeedHQ/runner/pull/88)
- Enhance version resolution with 'latest' support and flexible formats
by [@art049](https://redirect.github.com/art049) in
[https://github.com/CodSpeedHQ/action/pull/132](https://redirect.github.com/CodSpeedHQ/action/pull/132)
**Full Changelog**:
https://github.com/CodSpeedHQ/action/compare/v3.6.1...v3.7.0
**Full Runner Changelog**:
https://github.com/CodSpeedHQ/runner/blob/main/CHANGELOG.md
### [`v3.6.1`](https://redirect.github.com/CodSpeedHQ/action/releases/tag/v3.6.1)
[Compare Source](https://redirect.github.com/CodSpeedHQ/action/compare/v3.5.0...v3.6.1)
##### What's Changed
##### 🚀 Features
- Allow setting upload url via env var for convenience by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#85](https://redirect.github.com/CodSpeedHQ/runner/pull/85)
- Send unknown cpu\_brand when it is not recognized by
[@adriencaccia](https://redirect.github.com/adriencaccia)
- Allow only running the benchmarks, and only uploading the results by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#81](https://redirect.github.com/CodSpeedHQ/runner/pull/81)
- Install perf on setup by
[@not-matthias](https://redirect.github.com/not-matthias)
- Add perf integration for python by
[@not-matthias](https://redirect.github.com/not-matthias)
- Add perf integration for rust by
[@not-matthias](https://redirect.github.com/not-matthias)
- Add fifo ipc by
[@not-matthias](https://redirect.github.com/not-matthias)
- Use custom time formatting to be in line with the rest of CodSpeed by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#77](https://redirect.github.com/CodSpeedHQ/runner/pull/77)
- Output information about benches after a local run by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#76](https://redirect.github.com/CodSpeedHQ/runner/pull/76)
- Allow specifying oauth token through CLI by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#75](https://redirect.github.com/CodSpeedHQ/runner/pull/75)
- Add option to output structured json by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#74](https://redirect.github.com/CodSpeedHQ/runner/pull/74)
- Add flags to specify repository from CLI by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
- Improve error handling for valgrind by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#67](https://redirect.github.com/CodSpeedHQ/runner/pull/67)
- Handle local run failure by
[@adriencaccia](https://redirect.github.com/adriencaccia) in
[#71](https://redirect.github.com/CodSpeedHQ/runner/pull/71)
- Run benchmark with systemd (for optional cpu isolation) by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#86](https://redirect.github.com/CodSpeedHQ/runner/pull/86)
##### 🐛 Bug Fixes
- Persist logs when running with skip\_upload by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
in [#84](https://redirect.github.com/CodSpeedHQ/runner/pull/84)
- Valgrind crash for unresolved libpython by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#82](https://redirect.github.com/CodSpeedHQ/runner/pull/82)
- Support trailing slash in origin url by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#83](https://redirect.github.com/CodSpeedHQ/runner/pull/83)
- Use bash to ensure correct behavior across systems by
[@not-matthias](https://redirect.github.com/not-matthias)
- Fix test randomly failing due to other test run in parallel by
[@GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange)
- Check child status code after valgrind by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#72](https://redirect.github.com/CodSpeedHQ/runner/pull/72)
- Only show perf output at debug or trace level by
[@not-matthias](https://redirect.github.com/not-matthias) in
[#87](https://redirect.github.com/CodSpeedHQ/runner/pull/87)
##### ⚙️ Internals
- Dont use regex in perf map harvest by
[@not-matthias](https://redirect.github.com/not-matthias)
- Switch to astral-sh/cargo-dist by
[@adriencaccia](https://redirect.github.com/adriencaccia) in
[#80](https://redirect.github.com/CodSpeedHQ/runner/pull/80)
**Full Changelog**:
https://github.com/CodSpeedHQ/action/compare/v3.5.0...v3.6.1
**Full Runner Changelog**:
https://github.com/CodSpeedHQ/runner/blob/main/CHANGELOG.md
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ba7a4b4d1..0ccc9ea4e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2532,7 +2532,7 @@ jobs:
run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
- name: "Run benchmarks"
- uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
+ uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
@@ -2569,7 +2569,7 @@ jobs:
run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
- name: "Run benchmarks"
- uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
+ uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
From 4890f3ef2bbde7a2b36641dc1d8f92e7695f68ec Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Mon, 14 Jul 2025 09:07:30 -0500
Subject: [PATCH 024/130] Do not re-resolve with a new Python version in `uv
tool` if it is incompatible with `--python` (#14606)
Closes https://github.com/astral-sh/uv/issues/14604
---
crates/uv/src/commands/tool/common.rs | 22 +++++++++++-----------
crates/uv/tests/it/tool_run.rs | 20 ++++++++++++++++++++
2 files changed, 31 insertions(+), 11 deletions(-)
diff --git a/crates/uv/src/commands/tool/common.rs b/crates/uv/src/commands/tool/common.rs
index ffc1b5645..b24a64e25 100644
--- a/crates/uv/src/commands/tool/common.rs
+++ b/crates/uv/src/commands/tool/common.rs
@@ -98,14 +98,6 @@ pub(crate) async fn refine_interpreter(
return Ok(None);
}
- // If the user passed a `--python` request, and the refined interpreter is incompatible, we
- // can't use it.
- if let Some(python_request) = python_request {
- if !python_request.satisfied(interpreter, cache) {
- return Ok(None);
- }
- }
-
// We want an interpreter that's as close to the required version as possible. If we choose the
// "latest" Python, we risk choosing a version that lacks wheels for the tool's requirements
// (assuming those requirements don't publish source distributions).
@@ -135,15 +127,15 @@ pub(crate) async fn refine_interpreter(
Bound::Unbounded => unreachable!("`requires-python` should never be unbounded"),
};
- let python_request = PythonRequest::Version(VersionRequest::Range(
+ let requires_python_request = PythonRequest::Version(VersionRequest::Range(
VersionSpecifiers::from_iter([lower_bound, upper_bound]),
PythonVariant::default(),
));
- debug!("Refining interpreter with: {python_request}");
+ debug!("Refining interpreter with: {requires_python_request}");
let interpreter = PythonInstallation::find_or_download(
- Some(&python_request),
+ Some(&requires_python_request),
EnvironmentPreference::OnlySystem,
python_preference,
python_downloads,
@@ -158,6 +150,14 @@ pub(crate) async fn refine_interpreter(
.await?
.into_interpreter();
+ // If the user passed a `--python` request, and the refined interpreter is incompatible, we
+ // can't use it.
+ if let Some(python_request) = python_request {
+ if !python_request.satisfied(&interpreter, cache) {
+ return Ok(None);
+ }
+ }
+
Ok(Some(interpreter))
}
diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs
index 8bcf5c3d1..90d906fb5 100644
--- a/crates/uv/tests/it/tool_run.rs
+++ b/crates/uv/tests/it/tool_run.rs
@@ -3026,6 +3026,7 @@ fn tool_run_reresolve_python() -> anyhow::Result<()> {
+ foo==1.0.0 (from file://[TEMP_DIR]/foo)
");
+ // When an incompatible Python version is explicitly requested, we should not re-resolve
uv_snapshot!(context.filters(), context.tool_run()
.arg("--from")
.arg("./foo")
@@ -3034,6 +3035,25 @@ fn tool_run_reresolve_python() -> anyhow::Result<()> {
.arg("foo")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ × No solution found when resolving tool dependencies:
+ ╰─▶ Because the current Python version (3.11.[X]) does not satisfy Python>=3.12 and foo==1.0.0 depends on Python>=3.12, we can conclude that foo==1.0.0 cannot be used.
+ And because only foo==1.0.0 is available and you require foo, we can conclude that your requirements are unsatisfiable.
+ ");
+
+ // Unless the discovered interpreter is compatible with the request
+ uv_snapshot!(context.filters(), context.tool_run()
+ .arg("--from")
+ .arg("./foo")
+ .arg("--python")
+ .arg(">=3.11")
+ .arg("foo")
+ .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
+ .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
From 852aba4f90988b7bd437573b721062228d504b49 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 14 Jul 2025 16:09:06 +0200
Subject: [PATCH 025/130] Update Rust crate indicatif to 0.18.0 (#14598)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [indicatif](https://redirect.github.com/console-rs/indicatif) | workspace.dependencies | minor | `0.17.8` -> `0.18.0` |
---
> [!WARNING]
> Some dependencies could not be looked up. Check the Dependency
Dashboard for more information.
---
### Release Notes
console-rs/indicatif (indicatif)
### [`v0.18.0`](https://redirect.github.com/console-rs/indicatif/releases/tag/0.18.0)
[Compare Source](https://redirect.github.com/console-rs/indicatif/compare/0.17.12...0.18.0)
Unfortunately
[0.17.12](https://redirect.github.com/console-rs/indicatif/releases/0.17.12)
had to be yanked because the console upgrade was a semver-incompatible
change. Rerelease as 0.18.0 instead.
#### What's Changed
- Bump version to 0.18.0 by
[@djc](https://redirect.github.com/djc) in
[https://github.com/console-rs/indicatif/pull/715](https://redirect.github.com/console-rs/indicatif/pull/715)
### [`v0.17.12`](https://redirect.github.com/console-rs/indicatif/releases/tag/0.17.12)
[Compare Source](https://redirect.github.com/console-rs/indicatif/compare/0.17.11...0.17.12)
#### What's Changed
- Add ProgressBar::force\_draw by
[@jaheba](https://redirect.github.com/jaheba) in
[https://github.com/console-rs/indicatif/pull/689](https://redirect.github.com/console-rs/indicatif/pull/689)
- Use width to truncate `HumanFloatCount` values by
[@ReagentX](https://redirect.github.com/ReagentX) in
[https://github.com/console-rs/indicatif/pull/696](https://redirect.github.com/console-rs/indicatif/pull/696)
- `ProgressStyle` enable/disable colors based on draw target by
[@tonywu6](https://redirect.github.com/tonywu6) in
[https://github.com/console-rs/indicatif/pull/699](https://redirect.github.com/console-rs/indicatif/pull/699)
- Switch dep number\_prefix to unit\_prefix by
[@kimono-koans](https://redirect.github.com/kimono-koans) in
[https://github.com/console-rs/indicatif/pull/709](https://redirect.github.com/console-rs/indicatif/pull/709)
- draw\_target: inline the format arg to silence clippy by
[@chris-laplante](https://redirect.github.com/chris-laplante) in
[https://github.com/console-rs/indicatif/pull/711](https://redirect.github.com/console-rs/indicatif/pull/711)
- Upgrade to console 0.16 by
[@djc](https://redirect.github.com/djc) in
[https://github.com/console-rs/indicatif/pull/712](https://redirect.github.com/console-rs/indicatif/pull/712)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
Cargo.lock | 59 ++++++++++++++++++++++++++++++++++++------------------
Cargo.toml | 2 +-
2 files changed, 41 insertions(+), 20 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 8232c6905..7f6b601ca 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -788,10 +788,22 @@ dependencies = [
"encode_unicode",
"libc",
"once_cell",
- "unicode-width 0.2.1",
"windows-sys 0.59.0",
]
+[[package]]
+name = "console"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d"
+dependencies = [
+ "encode_unicode",
+ "libc",
+ "once_cell",
+ "unicode-width 0.2.1",
+ "windows-sys 0.60.2",
+]
+
[[package]]
name = "core-foundation"
version = "0.10.0"
@@ -1910,14 +1922,14 @@ dependencies = [
[[package]]
name = "indicatif"
-version = "0.17.11"
+version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
+checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd"
dependencies = [
- "console",
- "number_prefix",
+ "console 0.16.0",
"portable-atomic",
"unicode-width 0.2.1",
+ "unit-prefix",
"web-time",
]
@@ -1933,7 +1945,7 @@ version = "1.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
dependencies = [
- "console",
+ "console 0.15.11",
"once_cell",
"pest",
"pest_derive",
@@ -2465,12 +2477,6 @@ dependencies = [
"libc",
]
-[[package]]
-name = "number_prefix"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
-
[[package]]
name = "object"
version = "0.36.7"
@@ -4523,6 +4529,12 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
+[[package]]
+name = "unit-prefix"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817"
+
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
@@ -4631,7 +4643,7 @@ dependencies = [
"base64 0.22.1",
"byteorder",
"clap",
- "console",
+ "console 0.15.11",
"ctrlc",
"dotenvy",
"dunce",
@@ -5038,7 +5050,7 @@ dependencies = [
name = "uv-console"
version = "0.0.1"
dependencies = [
- "console",
+ "console 0.15.11",
]
[[package]]
@@ -5675,7 +5687,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"configparser",
- "console",
+ "console 0.15.11",
"fs-err 3.1.1",
"futures",
"rustc-hash",
@@ -6332,7 +6344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1"
dependencies = [
"windows-core 0.59.0",
- "windows-targets 0.53.0",
+ "windows-targets 0.53.2",
]
[[package]]
@@ -6379,7 +6391,7 @@ dependencies = [
"windows-interface 0.59.1",
"windows-result 0.3.4",
"windows-strings 0.3.1",
- "windows-targets 0.53.0",
+ "windows-targets 0.53.2",
]
[[package]]
@@ -6550,6 +6562,15 @@ dependencies = [
"windows-targets 0.52.6",
]
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.2",
+]
+
[[package]]
name = "windows-targets"
version = "0.48.5"
@@ -6583,9 +6604,9 @@ dependencies = [
[[package]]
name = "windows-targets"
-version = "0.53.0"
+version = "0.53.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
+checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
diff --git a/Cargo.toml b/Cargo.toml
index ecdc11701..3405cff53 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -116,7 +116,7 @@ home = { version = "0.5.9" }
html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }
-indicatif = { version = "0.17.8" }
+indicatif = { version = "0.18.0" }
indoc = { version = "2.0.5" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0", features = ["serde"] }
From df44199ceb4c856142bb67e060f464ba5c06d72e Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Mon, 14 Jul 2025 10:42:35 -0400
Subject: [PATCH 026/130] Add an exception handler on Windows (#14582)
We've seen a few cases of uv.exe exiting with an exception code as its
exit status and no user-visible output (#14563 in the field, and #13812
in CI). It seems that recent versions of Windows no longer show dialog
boxes on access violations (what UNIX calls segfaults) or similar
errors. Something is probably sent to Windows Error Reporting, and we
can maybe sign up to get the crashes from Microsoft, but the user
experience of seeing uv exit with no output is poor, both for end users
and during development. While it's possible to opt out of this behavior
or set up a debugger, this isn't the default configuration. (See
https://superuser.com/q/1246626 for some pointers.)
In order to get some output on a crash, we need to install our own
default handler for unhandled exceptions (or call all our code inside a
Structured Exception Handling __try/__catch block, which is complicated
on Rust). This is the moral equivalent of a segfault handler on Windows;
the kernel creates a new stack frame and passes arguments to it with
some processor state.
This commit adds a relatively simple exception handler that leans on
Rust's own backtrace implementation and also displays some minimal
information from the exception itself. This should be enough info to
communicate that something went wrong and let us collect enough
information to attempt to debug. There are also a handful of (non-Rust)
open-source libraries for this like Breakpad and Crashpad (both from
Google) and crashrpt.
The approach here, of using SetUnhandledExceptionFilter, seems to be the
standard one taken by other such libraries. Crashpad also seems to try
to use a newer mechanism for an out-of-tree DLL to report the crash:
https://issues.chromium.org/issues/42310037
If we have serious problems with memory corruption, it might be worth
adopting some third-party library that has already implemented this
approach. (In general, the docs of other crash reporting libraries are
worth skimming to understand how these things ought to work.)
Co-authored-by: samypr100 <3933065+samypr100@users.noreply.github.com>
---
Cargo.lock | 1 +
Cargo.toml | 2 +-
crates/uv/Cargo.toml | 1 +
crates/uv/src/lib.rs | 5 ++
crates/uv/src/windows_exception.rs | 130 +++++++++++++++++++++++++++++
5 files changed, 138 insertions(+), 1 deletion(-)
create mode 100644 crates/uv/src/windows_exception.rs
diff --git a/Cargo.lock b/Cargo.lock
index 7f6b601ca..f2bebefc9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4734,6 +4734,7 @@ dependencies = [
"walkdir",
"which",
"whoami",
+ "windows 0.59.0",
"wiremock",
"zip",
]
diff --git a/Cargo.toml b/Cargo.toml
index 3405cff53..752955223 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -184,7 +184,7 @@ url = { version = "2.5.2", features = ["serde"] }
version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
-windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] }
+windows = { version = "0.59.0", features = ["Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
windows-core = { version = "0.59.0" }
windows-registry = { version = "0.5.0" }
windows-result = { version = "0.3.0" }
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index 7fa28ed67..904cc8fc3 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -108,6 +108,7 @@ zip = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]
self-replace = { workspace = true }
+windows = { workspace = true }
[dev-dependencies]
assert_cmd = { version = "2.0.16" }
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 84d889599..2a163d32c 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -57,6 +57,8 @@ pub(crate) mod commands;
pub(crate) mod logging;
pub(crate) mod printer;
pub(crate) mod settings;
+#[cfg(windows)]
+mod windows_exception;
#[instrument(skip_all)]
async fn run(mut cli: Cli) -> Result<ExitStatus> {
@@ -2189,6 +2191,9 @@ where
I: IntoIterator<Item = T>,
T: Into<OsString> + Clone,
{
+ #[cfg(windows)]
+ windows_exception::setup();
+
// Set the `UV` variable to the current executable so it is implicitly propagated to all child
// processes, e.g., in `uv run`.
if let Ok(current_exe) = std::env::current_exe() {
diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs
new file mode 100644
index 000000000..e96075f96
--- /dev/null
+++ b/crates/uv/src/windows_exception.rs
@@ -0,0 +1,130 @@
+//! Helper for setting up Windows exception handling.
+//!
+//! Recent versions of Windows seem to no longer show dialog boxes on access violations
+//! (segfaults) or similar errors. The user experience is that the command exits with
+//! the exception code as its exit status and no visible output. In order to see these
+//! errors both in the field and in CI, we need to install our own exception handler.
+//!
+//! This is a relatively simple exception handler that leans on Rust's own backtrace
+//! implementation and also displays some minimal information from the exception itself.
+
+#![allow(unsafe_code)]
+#![allow(clippy::print_stderr)]
+
+use windows::Win32::{
+ Foundation,
+ System::Diagnostics::Debug::{
+ CONTEXT, EXCEPTION_CONTINUE_SEARCH, EXCEPTION_POINTERS, SetUnhandledExceptionFilter,
+ },
+};
+
+fn display_exception_info(name: &str, info: &[usize; 15]) {
+ match info[0] {
+ 0 => eprintln!("{name} reading {:#x}", info[1]),
+ 1 => eprintln!("{name} writing {:#x}", info[1]),
+ 8 => eprintln!("{name} executing {:#x}", info[1]),
+ _ => eprintln!("{name} from operation {} at {:#x}", info[0], info[1]),
+ }
+}
+
+#[cfg(target_arch = "x86")]
+fn dump_regs(c: &CONTEXT) {
+ eprintln!(
+ "eax={:08x} ebx={:08x} ecx={:08x} edx={:08x} esi={:08x} edi={:08x}",
+ c.Eax, c.Ebx, c.Ecx, c.Edx, c.Esi, c.Edi
+ );
+ eprintln!(
+ "eip={:08x} ebp={:08x} esp={:08x} eflags={:08x}",
+ c.Eip, c.Ebp, c.Esp, c.EFlags
+ );
+}
+
+#[cfg(target_arch = "x86_64")]
+fn dump_regs(c: &CONTEXT) {
+ eprintln!("rax={:016x} rbx={:016x} rcx={:016x}", c.Rax, c.Rbx, c.Rcx);
+ eprintln!("rdx={:016x} rsx={:016x} rdi={:016x}", c.Rdx, c.Rsi, c.Rdi);
+ eprintln!("rsp={:016x} rbp={:016x} r8={:016x}", c.Rsp, c.Rbp, c.R8);
+ eprintln!(" r9={:016x} r10={:016x} r11={:016x}", c.R9, c.R10, c.R11);
+ eprintln!("r12={:016x} r13={:016x} r14={:016x}", c.R12, c.R13, c.R14);
+ eprintln!(
+ "r15={:016x} rip={:016x} eflags={:016x}",
+ c.R15, c.Rip, c.EFlags
+ );
+}
+
+#[cfg(target_arch = "aarch64")]
+fn dump_regs(c: &CONTEXT) {
+ // SAFETY: The two variants of this anonymous union are equivalent,
+ // one's an array and one has named registers.
+ let r = unsafe { c.Anonymous.Anonymous };
+ eprintln!("cpsr={:016x} sp={:016x} pc={:016x}", c.Cpsr, c.Sp, c.Pc);
+ eprintln!(" x0={:016x} x1={:016x} x2={:016x}", r.X0, r.X1, r.X2);
+ eprintln!(" x3={:016x} x4={:016x} x5={:016x}", r.X3, r.X4, r.X5);
+ eprintln!(" x6={:016x} x7={:016x} x8={:016x}", r.X6, r.X7, r.X8);
+ eprintln!(" x9={:016x} x10={:016x} x11={:016x}", r.X9, r.X10, r.X11);
+ eprintln!(" x12={:016x} x13={:016x} x14={:016x}", r.X12, r.X13, r.X14);
+ eprintln!(" x15={:016x} x16={:016x} x17={:016x}", r.X15, r.X16, r.X17);
+ eprintln!(" x18={:016x} x19={:016x} x20={:016x}", r.X18, r.X19, r.X20);
+ eprintln!(" x21={:016x} x22={:016x} x23={:016x}", r.X21, r.X22, r.X23);
+ eprintln!(" x24={:016x} x25={:016x} x26={:016x}", r.X24, r.X25, r.X26);
+ eprintln!(" x27={:016x} x28={:016x}", r.X27, r.X28);
+ eprintln!(" fp={:016x} lr={:016x}", r.Fp, r.Lr);
+}
+
+unsafe extern "system" fn unhandled_exception_filter(
+ exception_info: *const EXCEPTION_POINTERS,
+) -> i32 {
+ // TODO: Really we should not be using eprintln here because Stderr is not async-signal-safe.
+ // Probably we should be calling the console APIs directly.
+ eprintln!("error: unhandled exception in uv, please report a bug:");
+ let mut context = None;
+ // SAFETY: Pointer comes from the OS
+ if let Some(info) = unsafe { exception_info.as_ref() } {
+ // SAFETY: Pointer comes from the OS
+ if let Some(exc) = unsafe { info.ExceptionRecord.as_ref() } {
+ eprintln!(
+ "code {:#X} at address {:?}",
+ exc.ExceptionCode.0, exc.ExceptionAddress
+ );
+ match exc.ExceptionCode {
+ Foundation::EXCEPTION_ACCESS_VIOLATION => {
+ display_exception_info("EXCEPTION_ACCESS_VIOLATION", &exc.ExceptionInformation);
+ }
+ Foundation::EXCEPTION_IN_PAGE_ERROR => {
+ display_exception_info("EXCEPTION_IN_PAGE_ERROR", &exc.ExceptionInformation);
+ }
+ Foundation::EXCEPTION_ILLEGAL_INSTRUCTION => {
+ eprintln!("EXCEPTION_ILLEGAL_INSTRUCTION");
+ }
+ Foundation::EXCEPTION_STACK_OVERFLOW => {
+ eprintln!("EXCEPTION_STACK_OVERFLOW");
+ }
+ _ => {}
+ }
+ } else {
+ eprintln!("(ExceptionRecord is NULL)");
+ }
+ // SAFETY: Pointer comes from the OS
+ context = unsafe { info.ContextRecord.as_ref() };
+ } else {
+ eprintln!("(ExceptionInfo is NULL)");
+ }
+ let backtrace = std::backtrace::Backtrace::capture();
+ if backtrace.status() == std::backtrace::BacktraceStatus::Disabled {
+ eprintln!("note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace");
+ } else {
+ if let Some(context) = context {
+ dump_regs(context);
+ }
+ eprintln!("stack backtrace:\n{backtrace:#}");
+ }
+ EXCEPTION_CONTINUE_SEARCH
+}
+
+/// Set up our handler for unhandled exceptions.
+pub(crate) fn setup() {
+ // SAFETY: winapi call
+ unsafe {
+ SetUnhandledExceptionFilter(Some(Some(unhandled_exception_filter)));
+ }
+}
From 34fbc06ad6111f43a259993c51f12ac021cc2238 Mon Sep 17 00:00:00 2001
From: Aria Desires
Date: Mon, 14 Jul 2025 10:53:39 -0400
Subject: [PATCH 027/130] Add experimental `uv sync --output-format json`
(#13689)
This is a continuation of the work in
* #12405
I have:
* moved to an architecture where the human output is derived from the
json structs to centralize more of the printing state/logic
* cleaned up some of the names/types
* added tests
* removed the restriction that this output is --dry-run only
I have not yet added package info, which was TBD in their design.
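For orientation, here is a rough sketch of the JSON shape implied by the `Report` structs added in this diff, using the script-without-a-lockfile case (so `project` is omitted and `lock` is `null`); the script name, paths, and interpreter version below are placeholders, and the schema is explicitly versioned as `preview` and may change:
```json
{
  "schema": { "version": "preview" },
  "target": "script",
  "script": { "path": "example.py" },
  "sync": {
    "environment": {
      "path": "[CACHE_DIR]/environments-v2/example-1234567890abcdef",
      "python": {
        "path": "[CACHE_DIR]/environments-v2/example-1234567890abcdef/bin/python",
        "version": "3.12.0",
        "implementation": "cpython"
      }
    },
    "action": "check"
  },
  "lock": null,
  "dry_run": false
}
```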
---------
Co-authored-by: x0rw
Co-authored-by: Zanie Blue
Co-authored-by: John Mumm
---
crates/uv-cli/src/lib.rs | 13 +
crates/uv-fs/src/path.rs | 6 +
crates/uv/src/commands/project/mod.rs | 16 +
crates/uv/src/commands/project/sync.rs | 615 ++++++++---
crates/uv/src/lib.rs | 1 +
crates/uv/src/settings.rs | 7 +-
crates/uv/tests/it/common/mod.rs | 2 +-
crates/uv/tests/it/sync.rs | 1326 ++++++++++++++++--------
docs/reference/cli.md | 7 +-
9 files changed, 1389 insertions(+), 604 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 056447959..0f3652341 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -46,6 +46,15 @@ pub enum PythonListFormat {
Json,
}
+#[derive(Debug, Default, Clone, Copy, clap::ValueEnum)]
+pub enum SyncFormat {
+ /// Display the result in a human-readable format.
+ #[default]
+ Text,
+ /// Display the result in JSON format.
+ Json,
+}
+
#[derive(Debug, Default, Clone, clap::ValueEnum)]
pub enum ListFormat {
/// Display the list of packages in a human-readable table.
@@ -3207,6 +3216,10 @@ pub struct SyncArgs {
#[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
pub extra: Option<Vec<ExtraName>>,
+ /// Select the output format.
+ #[arg(long, value_enum, default_value_t = SyncFormat::default())]
+ pub output_format: SyncFormat,
+
/// Include all optional dependencies.
///
/// When two or more extras are declared as conflicting in `tool.uv.conflicts`, using this flag
diff --git a/crates/uv-fs/src/path.rs b/crates/uv-fs/src/path.rs
index 40e579f8e..45d1da1c8 100644
--- a/crates/uv-fs/src/path.rs
+++ b/crates/uv-fs/src/path.rs
@@ -398,6 +398,12 @@ impl From<Box<Path>> for PortablePathBuf {
}
}
+impl<'a> From<&'a Path> for PortablePathBuf {
+ fn from(path: &'a Path) -> Self {
+ Box::<Path>::from(path).into()
+ }
+}
+
#[cfg(feature = "serde")]
impl serde::Serialize for PortablePathBuf {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index 1a0274cac..774009f63 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -1408,6 +1408,14 @@ impl ProjectEnvironment {
Self::WouldCreate(..) => Err(ProjectError::DroppedEnvironment),
}
}
+
+ /// Return the path to the actual target, if this was a dry run environment.
+ pub(crate) fn dry_run_target(&self) -> Option<&Path> {
+ match self {
+ Self::WouldReplace(path, _, _) | Self::WouldCreate(path, _, _) => Some(path),
+ Self::Created(_) | Self::Existing(_) | Self::Replaced(_) => None,
+ }
+ }
}
impl std::ops::Deref for ProjectEnvironment {
@@ -1588,6 +1596,14 @@ impl ScriptEnvironment {
Self::WouldCreate(..) => Err(ProjectError::DroppedEnvironment),
}
}
+
+ /// Return the path to the actual target, if this was a dry run environment.
+ pub(crate) fn dry_run_target(&self) -> Option<&Path> {
+ match self {
+ Self::WouldReplace(path, _, _) | Self::WouldCreate(path, _, _) => Some(path),
+ Self::Created(_) | Self::Existing(_) | Self::Replaced(_) => None,
+ }
+ }
}
impl std::ops::Deref for ScriptEnvironment {
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index a9a161527..94586004f 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -6,9 +6,10 @@ use std::sync::Arc;
use anyhow::{Context, Result};
use itertools::Itertools;
use owo_colors::OwoColorize;
+use serde::Serialize;
use tracing::warn;
-
use uv_cache::Cache;
+use uv_cli::SyncFormat;
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode,
@@ -19,7 +20,7 @@ use uv_dispatch::BuildDispatch;
use uv_distribution_types::{
DirectorySourceDist, Dist, Index, Requirement, Resolution, ResolvedDist, SourceDist,
};
-use uv_fs::Simplified;
+use uv_fs::{PortablePathBuf, Simplified};
use uv_installer::SitePackages;
use uv_normalize::{DefaultExtras, DefaultGroups, PackageName};
use uv_pep508::{MarkerTree, VersionOrUrl};
@@ -77,7 +78,14 @@ pub(crate) async fn sync(
cache: &Cache,
printer: Printer,
preview: PreviewMode,
+ output_format: SyncFormat,
) -> Result<ExitStatus> {
+ if !preview.is_enabled() && matches!(output_format, SyncFormat::Json) {
+ warn_user!(
+ "The `--output-format json` option is experimental and the schema may change without warning. Pass `--preview` to disable this warning."
+ );
+ }
+
// Identify the target.
let workspace_cache = WorkspaceCache::default();
let target = if let Some(script) = script {
@@ -180,103 +188,16 @@ pub(crate) async fn sync(
})
.ok();
- // Notify the user of any environment changes.
- match &environment {
- SyncEnvironment::Project(ProjectEnvironment::Existing(environment))
- if dry_run.enabled() =>
- {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Discovered existing environment at: {}",
- environment.root().user_display().bold()
- )
- .dimmed()
- )?;
- }
- SyncEnvironment::Project(ProjectEnvironment::WouldReplace(root, ..))
- if dry_run.enabled() =>
- {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would replace existing virtual environment at: {}",
- root.user_display().bold()
- )
- .dimmed()
- )?;
- }
- SyncEnvironment::Project(ProjectEnvironment::WouldCreate(root, ..))
- if dry_run.enabled() =>
- {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would create virtual environment at: {}",
- root.user_display().bold()
- )
- .dimmed()
- )?;
- }
- SyncEnvironment::Script(ScriptEnvironment::Existing(environment)) => {
- if dry_run.enabled() {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Discovered existing environment at: {}",
- environment.root().user_display().bold()
- )
- .dimmed()
- )?;
- } else {
- writeln!(
- printer.stderr(),
- "Using script environment at: {}",
- environment.root().user_display().cyan()
- )?;
- }
- }
- SyncEnvironment::Script(ScriptEnvironment::Replaced(environment)) if !dry_run.enabled() => {
- writeln!(
- printer.stderr(),
- "Recreating script environment at: {}",
- environment.root().user_display().cyan()
- )?;
- }
- SyncEnvironment::Script(ScriptEnvironment::Created(environment)) if !dry_run.enabled() => {
- writeln!(
- printer.stderr(),
- "Creating script environment at: {}",
- environment.root().user_display().cyan()
- )?;
- }
- SyncEnvironment::Script(ScriptEnvironment::WouldReplace(root, ..)) if dry_run.enabled() => {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would replace existing script environment at: {}",
- root.user_display().bold()
- )
- .dimmed()
- )?;
- }
- SyncEnvironment::Script(ScriptEnvironment::WouldCreate(root, ..)) if dry_run.enabled() => {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would create script environment at: {}",
- root.user_display().bold()
- )
- .dimmed()
- )?;
- }
- _ => {}
+ let sync_report = SyncReport {
+ dry_run: dry_run.enabled(),
+ environment: EnvironmentReport::from(&environment),
+ action: SyncAction::from(&environment),
+ target: TargetName::from(&target),
+ };
+
+ // Show the intermediate results if relevant
+ if let Some(message) = sync_report.format(output_format) {
+ writeln!(printer.stderr(), "{message}")?;
}
// Special-case: we're syncing a script that doesn't have an associated lockfile. In that case,
@@ -340,7 +261,23 @@ pub(crate) async fn sync(
)
.await
{
- Ok(..) => return Ok(ExitStatus::Success),
+ Ok(..) => {
+ // Generate a report for the script without a lockfile
+ let report = Report {
+ schema: SchemaReport::default(),
+ target: TargetName::from(&target),
+ project: None,
+ script: Some(ScriptReport::from(script)),
+ sync: sync_report,
+ lock: None,
+ dry_run: dry_run.enabled(),
+ };
+ if let Some(output) = report.format(output_format) {
+ writeln!(printer.stdout(), "{output}")?;
+ }
+ return Ok(ExitStatus::Success);
+ }
+ // TODO(zanieb): We should respect `--output-format json` for the error case
Err(ProjectError::Operation(err)) => {
return diagnostics::OperationDiagnostic::native_tls(
network_settings.native_tls,
@@ -387,46 +324,7 @@ pub(crate) async fn sync(
.execute(lock_target)
.await
{
- Ok(result) => {
- if dry_run.enabled() {
- match result {
- LockResult::Unchanged(..) => {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Found up-to-date lockfile at: {}",
- lock_target.lock_path().user_display().bold()
- )
- .dimmed()
- )?;
- }
- LockResult::Changed(None, ..) => {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would create lockfile at: {}",
- lock_target.lock_path().user_display().bold()
- )
- .dimmed()
- )?;
- }
- LockResult::Changed(Some(..), ..) => {
- writeln!(
- printer.stderr(),
- "{}",
- format!(
- "Would update lockfile at: {}",
- lock_target.lock_path().user_display().bold()
- )
- .dimmed()
- )?;
- }
- }
- }
- Outcome::Success(result.into_lock())
- }
+ Ok(result) => Outcome::Success(result),
Err(ProjectError::Operation(err)) => {
return diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls)
.report(err)
@@ -440,6 +338,25 @@ pub(crate) async fn sync(
Err(err) => return Err(err.into()),
};
+ let lock_report = LockReport::from((&lock_target, &mode, &outcome));
+ if let Some(message) = lock_report.format(output_format) {
+ writeln!(printer.stderr(), "{message}")?;
+ }
+
+ let report = Report {
+ schema: SchemaReport::default(),
+ target: TargetName::from(&target),
+ project: target.project().map(ProjectReport::from),
+ script: target.script().map(ScriptReport::from),
+ sync: sync_report,
+ lock: Some(lock_report),
+ dry_run: dry_run.enabled(),
+ };
+
+ if let Some(output) = report.format(output_format) {
+ writeln!(printer.stdout(), "{output}")?;
+ }
+
// Identify the installation target.
let sync_target =
identify_installation_target(&target, outcome.lock(), all_packages, package.as_ref());
@@ -490,7 +407,7 @@ pub(crate) async fn sync(
#[allow(clippy::large_enum_variant)]
enum Outcome {
/// The `lock` operation was successful.
- Success(Lock),
+ Success(LockResult),
/// The `lock` operation successfully resolved, but failed due to a mismatch (e.g., with `--locked`).
LockMismatch(Box<Lock>),
}
@@ -499,7 +416,7 @@ impl Outcome {
/// Return the [`Lock`] associated with this outcome.
fn lock(&self) -> &Lock {
match self {
- Self::Success(lock) => lock,
+ Self::Success(lock) => lock.lock(),
Self::LockMismatch(lock) => lock,
}
}
@@ -563,6 +480,22 @@ enum SyncTarget {
Script(Pep723Script),
}
+impl SyncTarget {
+ fn project(&self) -> Option<&VirtualProject> {
+ match self {
+ Self::Project(project) => Some(project),
+ Self::Script(_) => None,
+ }
+ }
+
+ fn script(&self) -> Option<&Pep723Script> {
+ match self {
+ Self::Project(_) => None,
+ Self::Script(script) => Some(script),
+ }
+ }
+}
+
#[derive(Debug)]
enum SyncEnvironment {
/// A Python environment for a project.
@@ -571,6 +504,15 @@ enum SyncEnvironment {
Script(ScriptEnvironment),
}
+impl SyncEnvironment {
+ fn dry_run_target(&self) -> Option<&Path> {
+ match self {
+ Self::Project(env) => env.dry_run_target(),
+ Self::Script(env) => env.dry_run_target(),
+ }
+ }
+}
+
impl Deref for SyncEnvironment {
type Target = PythonEnvironment;
@@ -892,3 +834,392 @@ fn store_credentials_from_target(target: InstallTarget<'_>) {
}
}
}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "snake_case")]
+struct WorkspaceReport {
+ /// The workspace directory path.
+ path: PortablePathBuf,
+}
+
+impl From<&Workspace> for WorkspaceReport {
+ fn from(workspace: &Workspace) -> Self {
+ Self {
+ path: workspace.install_path().as_path().into(),
+ }
+ }
+}
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "snake_case")]
+struct ProjectReport {
+ //
+ path: PortablePathBuf,
+ workspace: WorkspaceReport,
+}
+
+impl From<&VirtualProject> for ProjectReport {
+ fn from(project: &VirtualProject) -> Self {
+ Self {
+ path: project.root().into(),
+ workspace: WorkspaceReport::from(project.workspace()),
+ }
+ }
+}
+
+impl From<&SyncTarget> for TargetName {
+ fn from(target: &SyncTarget) -> Self {
+ match target {
+ SyncTarget::Project(_) => TargetName::Project,
+ SyncTarget::Script(_) => TargetName::Script,
+ }
+ }
+}
+
+#[derive(Serialize, Debug)]
+struct ScriptReport {
+ /// The path to the script.
+ path: PortablePathBuf,
+}
+
+impl From<&Pep723Script> for ScriptReport {
+ fn from(script: &Pep723Script) -> Self {
+ Self {
+ path: script.path.as_path().into(),
+ }
+ }
+}
+
+#[derive(Serialize, Debug, Default)]
+#[serde(rename_all = "snake_case")]
+enum SchemaVersion {
+ /// An unstable, experimental schema.
+ #[default]
+ Preview,
+}
+
+#[derive(Serialize, Debug, Default)]
+struct SchemaReport {
+ /// The version of the schema.
+ version: SchemaVersion,
+}
+
+/// A report of the uv sync operation
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "snake_case")]
+struct Report {
+ /// The schema of this report.
+ schema: SchemaReport,
+ /// The target of the sync operation, either a project or a script.
+ target: TargetName,
+ /// The report for a [`TargetName::Project`], if applicable.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ project: Option<ProjectReport>,
+ /// The report for a [`TargetName::Script`], if applicable.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ script: Option<ScriptReport>,
+ /// The report for the sync operation.
+ sync: SyncReport,
+ /// The report for the lock operation.
+ lock: Option<LockReport>,
+ /// Whether this is a dry run.
+ dry_run: bool,
+}
+
+/// The kind of target
+#[derive(Debug, Serialize, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+enum TargetName {
+ Project,
+ Script,
+}
+
+impl std::fmt::Display for TargetName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ TargetName::Project => write!(f, "project"),
+ TargetName::Script => write!(f, "script"),
+ }
+ }
+}
+
+/// Represents the action taken during a sync.
+#[derive(Serialize, Debug)]
+#[serde(rename_all = "snake_case")]
+enum SyncAction {
+ /// The environment was checked and required no updates.
+ Check,
+ /// The environment was updated.
+ Update,
+ /// The environment was replaced.
+ Replace,
+ /// A new environment was created.
+ Create,
+}
+
+impl From<&SyncEnvironment> for SyncAction {
+ fn from(env: &SyncEnvironment) -> Self {
+ match &env {
+ SyncEnvironment::Project(ProjectEnvironment::Existing(..)) => SyncAction::Check,
+ SyncEnvironment::Project(ProjectEnvironment::Created(..)) => SyncAction::Create,
+ SyncEnvironment::Project(ProjectEnvironment::WouldCreate(..)) => SyncAction::Create,
+ SyncEnvironment::Project(ProjectEnvironment::WouldReplace(..)) => SyncAction::Replace,
+ SyncEnvironment::Project(ProjectEnvironment::Replaced(..)) => SyncAction::Update,
+ SyncEnvironment::Script(ScriptEnvironment::Existing(..)) => SyncAction::Check,
+ SyncEnvironment::Script(ScriptEnvironment::Created(..)) => SyncAction::Create,
+ SyncEnvironment::Script(ScriptEnvironment::WouldCreate(..)) => SyncAction::Create,
+ SyncEnvironment::Script(ScriptEnvironment::WouldReplace(..)) => SyncAction::Replace,
+ SyncEnvironment::Script(ScriptEnvironment::Replaced(..)) => SyncAction::Update,
+ }
+ }
+}
+
+impl SyncAction {
+ fn message(&self, target: TargetName, dry_run: bool) -> Option<&'static str> {
+ let message = if dry_run {
+ match self {
+ SyncAction::Check => "Would use",
+ SyncAction::Update => "Would update",
+ SyncAction::Replace => "Would replace",
+ SyncAction::Create => "Would create",
+ }
+ } else {
+ // For projects, we omit some of these messages when we're not in dry-run mode
+ let is_project = matches!(target, TargetName::Project);
+ match self {
+ SyncAction::Check | SyncAction::Update | SyncAction::Create if is_project => {
+ return None;
+ }
+ SyncAction::Check => "Using",
+ SyncAction::Update => "Updating",
+ SyncAction::Replace => "Replacing",
+ SyncAction::Create => "Creating",
+ }
+ };
+ Some(message)
+ }
+}
+
+/// Represents the action taken during a lock.
+#[derive(Serialize, Debug)]
+#[serde(rename_all = "snake_case")]
+enum LockAction {
+ /// The lockfile was used without checking.
+ Use,
+ /// The lockfile was checked and required no updates.
+ Check,
+ /// The lockfile was updated.
+ Update,
+ /// A new lockfile was created.
+ Create,
+}
+
+impl LockAction {
+ fn message(&self, dry_run: bool) -> Option<&'static str> {
+ let message = if dry_run {
+ match self {
+ LockAction::Use => return None,
+ LockAction::Check => "Found up-to-date",
+ LockAction::Update => "Would update",
+ LockAction::Create => "Would create",
+ }
+ } else {
+ return None;
+ };
+ Some(message)
+ }
+}
+
+#[derive(Serialize, Debug)]
+struct PythonReport {
+ path: PortablePathBuf,
+ version: uv_pep508::StringVersion,
+ implementation: String,
+}
+
+impl From<&uv_python::Interpreter> for PythonReport {
+ fn from(interpreter: &uv_python::Interpreter) -> Self {
+ Self {
+ path: interpreter.sys_executable().into(),
+ version: interpreter.python_full_version().clone(),
+ implementation: interpreter.implementation_name().to_string(),
+ }
+ }
+}
+
+impl PythonReport {
+ /// Set the path for this Python report.
+ #[must_use]
+ fn with_path(mut self, path: PortablePathBuf) -> Self {
+ self.path = path;
+ self
+ }
+}
+
+#[derive(Serialize, Debug)]
+struct EnvironmentReport {
+ /// The path to the environment.
+ path: PortablePathBuf,
+ /// The Python interpreter for the environment.
+ python: PythonReport,
+}
+
+impl From<&PythonEnvironment> for EnvironmentReport {
+ fn from(env: &PythonEnvironment) -> Self {
+ Self {
+ python: PythonReport::from(env.interpreter()),
+ path: env.root().into(),
+ }
+ }
+}
+
+impl From<&SyncEnvironment> for EnvironmentReport {
+ fn from(env: &SyncEnvironment) -> Self {
+ let report = EnvironmentReport::from(&**env);
+ // Replace the path if necessary; we construct a temporary virtual environment during dry
+ // run invocations and want to report the path we _would_ use.
+ if let Some(path) = env.dry_run_target() {
+ report.with_path(path.into())
+ } else {
+ report
+ }
+ }
+}
+
+impl EnvironmentReport {
+ /// Set the path for this environment report.
+ #[must_use]
+ fn with_path(mut self, path: PortablePathBuf) -> Self {
+ let python_path = &self.python.path;
+ if let Ok(python_path) = python_path.as_ref().strip_prefix(self.path) {
+ let new_path = path.as_ref().to_path_buf().join(python_path);
+ self.python = self.python.with_path(new_path.as_path().into());
+ }
+ self.path = path;
+ self
+ }
+}
+
+/// The report for a sync operation.
+#[derive(Serialize, Debug)]
+struct SyncReport {
+ /// The environment.
+ environment: EnvironmentReport,
+ /// The action performed during the sync, e.g., what was done to the environment.
+ action: SyncAction,
+
+ // We store these fields so the report can format itself without external context; the outer
+ // [`Report`] is responsible for including them in user-facing output.
+ #[serde(skip)]
+ dry_run: bool,
+ #[serde(skip)]
+ target: TargetName,
+}
+
+impl SyncReport {
+ fn format(&self, output_format: SyncFormat) -> Option<String> {
+ match output_format {
+ // This is an intermediate report; when using JSON, it's only rendered at the end.
+ SyncFormat::Json => None,
+ SyncFormat::Text => self.to_human_readable_string(),
+ }
+ }
+
+ fn to_human_readable_string(&self) -> Option<String> {
+ let Self {
+ environment,
+ action,
+ dry_run,
+ target,
+ } = self;
+
+ let action = action.message(*target, *dry_run)?;
+
+ let message = format!(
+ "{action} {target} environment at: {path}",
+ path = environment.path.user_display().cyan(),
+ );
+ if *dry_run {
+ return Some(message.dimmed().to_string());
+ }
+
+ Some(message)
+ }
+}
+
+/// The report for a lock operation.
+#[derive(Debug, Serialize)]
+struct LockReport {
+ /// The path to the lockfile
+ path: PortablePathBuf,
+ /// Whether the lockfile was preserved, created, or updated.
+ action: LockAction,
+
+ // We store this field so the report can format itself without external context; the outer
+ // [`Report`] is responsible for including it in user-facing output.
+ #[serde(skip)]
+ dry_run: bool,
+}
+
+impl From<(&LockTarget<'_>, &LockMode<'_>, &Outcome)> for LockReport {
+ fn from((target, mode, outcome): (&LockTarget, &LockMode, &Outcome)) -> Self {
+ Self {
+ path: target.lock_path().deref().into(),
+ action: match outcome {
+ Outcome::Success(result) => {
+ match result {
+ LockResult::Unchanged(..) => match mode {
+ // When `--frozen` is used, we don't check the lockfile
+ LockMode::Frozen => LockAction::Use,
+ LockMode::DryRun(_) | LockMode::Locked(_) | LockMode::Write(_) => {
+ LockAction::Check
+ }
+ },
+ LockResult::Changed(None, ..) => LockAction::Create,
+ LockResult::Changed(Some(_), ..) => LockAction::Update,
+ }
+ }
+ // TODO(zanieb): We don't have a way to report the outcome of the lock yet
+ Outcome::LockMismatch(_) => LockAction::Check,
+ },
+ dry_run: matches!(mode, LockMode::DryRun(_)),
+ }
+ }
+}
+
+impl LockReport {
+ fn format(&self, output_format: SyncFormat) -> Option<String> {
+ match output_format {
+ SyncFormat::Json => None,
+ SyncFormat::Text => self.to_human_readable_string(),
+ }
+ }
+
+ fn to_human_readable_string(&self) -> Option<String> {
+ let Self {
+ path,
+ action,
+ dry_run,
+ } = self;
+
+ let action = action.message(*dry_run)?;
+
+ let message = format!(
+ "{action} lockfile at: {path}",
+ path = path.user_display().cyan(),
+ );
+ if *dry_run {
+ return Some(message.dimmed().to_string());
+ }
+
+ Some(message)
+ }
+}
+
+impl Report {
+ fn format(&self, output_format: SyncFormat) -> Option<String> {
+ match output_format {
+ SyncFormat::Json => serde_json::to_string_pretty(self).ok(),
+ SyncFormat::Text => None,
+ }
+ }
+}
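For downstream tooling, the `Report` above is what `uv sync --output-format json` writes to stdout (see the `sync_json` snapshots further down). A minimal consumer sketch, assuming `serde_json` is available and using an abbreviated payload modeled on those snapshots — illustrative only, not part of this change:

```rust
use serde_json::Value;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Abbreviated payload modeled on the `sync_json` snapshots; in practice this
    // would be the captured stdout of `uv sync --output-format json`.
    let raw = r#"{
        "schema": {"version": "preview"},
        "target": "project",
        "sync": {
            "environment": {
                "path": ".venv",
                "python": {"path": ".venv/bin/python", "version": "3.12.0", "implementation": "cpython"}
            },
            "action": "check"
        },
        "lock": {"path": "uv.lock", "action": "check"},
        "dry_run": false
    }"#;

    let report: Value = serde_json::from_str(raw)?;

    // The schema is explicitly marked "preview", so gate on it before reading other fields.
    if report["schema"]["version"] == "preview" {
        println!(
            "sync action: {}, lock action: {}",
            report["sync"]["action"], report["lock"]["action"]
        );
    }
    Ok(())
}
```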
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 2a163d32c..0b4d0bb82 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1818,6 +1818,7 @@ async fn run_project(
&cache,
printer,
globals.preview,
+ args.output_format,
))
.await
}
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index f89704d45..8a325d538 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -11,8 +11,8 @@ use uv_cli::{
PipCheckArgs, PipCompileArgs, PipFreezeArgs, PipInstallArgs, PipListArgs, PipShowArgs,
PipSyncArgs, PipTreeArgs, PipUninstallArgs, PythonFindArgs, PythonInstallArgs, PythonListArgs,
PythonListFormat, PythonPinArgs, PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs,
- SyncArgs, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs,
- VenvArgs, VersionArgs, VersionBump, VersionFormat,
+ SyncArgs, SyncFormat, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs,
+ ToolUninstallArgs, TreeArgs, VenvArgs, VersionArgs, VersionBump, VersionFormat,
};
use uv_cli::{
AuthorFrom, BuildArgs, ExportArgs, PublishArgs, PythonDirArgs, ResolverInstallerArgs,
@@ -1154,6 +1154,7 @@ pub(crate) struct SyncSettings {
pub(crate) install_mirrors: PythonInstallMirrors,
pub(crate) refresh: Refresh,
pub(crate) settings: ResolverInstallerSettings,
+ pub(crate) output_format: SyncFormat,
}
impl SyncSettings {
@@ -1194,6 +1195,7 @@ impl SyncSettings {
python_platform,
check,
no_check,
+ output_format,
} = args;
let install_mirrors = filesystem
.clone()
@@ -1213,6 +1215,7 @@ impl SyncSettings {
};
Self {
+ output_format,
locked,
frozen,
dry_run,
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index 90f436f6f..2dc72fa1d 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -210,7 +210,7 @@ impl TestContext {
pub fn with_filtered_python_names(mut self) -> Self {
if cfg!(windows) {
self.filters
- .push(("python.exe".to_string(), "python".to_string()));
+ .push((r"python\.exe".to_string(), "python".to_string()));
} else {
self.filters
.push((r"python\d.\d\d".to_string(), "python".to_string()));
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index d4479296a..7063035f9 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -27,7 +27,7 @@ fn sync() -> Result<()> {
)?;
// Running `uv sync` should generate a lockfile.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -37,7 +37,7 @@ fn sync() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -60,14 +60,14 @@ fn locked() -> Result<()> {
)?;
// Running with `--locked` should error, if no lockfile is present.
- uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
- "###);
+ ");
// Lock the initial requirements.
context.lock().assert().success();
@@ -86,7 +86,7 @@ fn locked() -> Result<()> {
)?;
// Running with `--locked` should error.
- uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -94,7 +94,7 @@ fn locked() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ ");
let updated = context.read("uv.lock");
@@ -120,14 +120,14 @@ fn frozen() -> Result<()> {
)?;
// Running with `--frozen` should error, if no lockfile is present.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
- "###);
+ ");
context.lock().assert().success();
@@ -143,7 +143,7 @@ fn frozen() -> Result<()> {
)?;
// Running with `--frozen` should install the stale lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -154,7 +154,7 @@ fn frozen() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -172,7 +172,7 @@ fn empty() -> Result<()> {
)?;
// Running `uv sync` should generate an empty lockfile.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -181,12 +181,12 @@ fn empty() -> Result<()> {
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved in [TIME]
Audited in [TIME]
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
// Running `uv sync` again should succeed.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -195,7 +195,7 @@ fn empty() -> Result<()> {
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved in [TIME]
Audited in [TIME]
- "###);
+ ");
Ok(())
}
@@ -252,7 +252,7 @@ fn package() -> Result<()> {
let init = src.child("__init__.py");
init.touch()?;
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -263,7 +263,239 @@ fn package() -> Result<()> {
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
- "###);
+ ");
+
+ Ok(())
+}
+
+/// Test JSON output
+#[test]
+fn sync_json() -> Result<()> {
+ let context = TestContext::new("3.12")
+ .with_filtered_python_names()
+ .with_filtered_virtualenv_bin();
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["iniconfig"]
+ "#,
+ )?;
+
+ uv_snapshot!(context.filters(), context.sync()
+ .arg("--output-format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "project",
+ "project": {
+ "path": "[TEMP_DIR]/",
+ "workspace": {
+ "path": "[TEMP_DIR]/"
+ }
+ },
+ "sync": {
+ "environment": {
+ "path": "[VENV]/",
+ "python": {
+ "path": "[VENV]/[BIN]/python",
+ "version": "3.12.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "check"
+ },
+ "lock": {
+ "path": "[TEMP_DIR]/uv.lock",
+ "action": "create"
+ },
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + iniconfig==2.0.0
+ "#);
+
+ assert!(context.temp_dir.child("uv.lock").exists());
+
+ uv_snapshot!(context.filters(), context.sync()
+ .arg("--frozen")
+ .arg("--output-format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "project",
+ "project": {
+ "path": "[TEMP_DIR]/",
+ "workspace": {
+ "path": "[TEMP_DIR]/"
+ }
+ },
+ "sync": {
+ "environment": {
+ "path": "[VENV]/",
+ "python": {
+ "path": "[VENV]/[BIN]/python",
+ "version": "3.12.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "check"
+ },
+ "lock": {
+ "path": "[TEMP_DIR]/uv.lock",
+ "action": "use"
+ },
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ Audited 1 package in [TIME]
+ "#);
+
+ uv_snapshot!(context.filters(), context.sync()
+ .arg("--locked")
+ .arg("--output-format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "project",
+ "project": {
+ "path": "[TEMP_DIR]/",
+ "workspace": {
+ "path": "[TEMP_DIR]/"
+ }
+ },
+ "sync": {
+ "environment": {
+ "path": "[VENV]/",
+ "python": {
+ "path": "[VENV]/[BIN]/python",
+ "version": "3.12.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "check"
+ },
+ "lock": {
+ "path": "[TEMP_DIR]/uv.lock",
+ "action": "check"
+ },
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ Audited 1 package in [TIME]
+ "#);
+
+ // Invalidate the lockfile by changing the requirements.
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["iniconfig<2"]
+ "#,
+ )?;
+
+ uv_snapshot!(context.filters(), context.sync()
+ .arg("--locked")
+ .arg("--output-format").arg("json"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
+
+ Ok(())
+}
+
+/// Test --dry-run JSON output
+#[test]
+fn sync_dry_json() -> Result<()> {
+ let context = TestContext::new_with_versions(&["3.12"])
+ .with_filtered_python_names()
+ .with_filtered_virtualenv_bin();
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["iniconfig"]
+ "#,
+ )?;
+
+ // Running `uv sync` should report intent to create the environment and lockfile
+ uv_snapshot!(context.filters(), context.sync()
+ .arg("--output-format").arg("json")
+ .arg("--dry-run"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "project",
+ "project": {
+ "path": "[TEMP_DIR]/",
+ "workspace": {
+ "path": "[TEMP_DIR]/"
+ }
+ },
+ "sync": {
+ "environment": {
+ "path": "[VENV]/",
+ "python": {
+ "path": "[VENV]/[BIN]/python",
+ "version": "3.12.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "create"
+ },
+ "lock": {
+ "path": "[TEMP_DIR]/uv.lock",
+ "action": "create"
+ },
+ "dry_run": true
+ }
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Resolved 2 packages in [TIME]
+ Would download 1 package
+ Would install 1 package
+ + iniconfig==2.0.0
+ "#);
Ok(())
}
@@ -322,7 +554,7 @@ fn mixed_requires_python() -> Result<()> {
)?;
// Running `uv sync` should succeed, locking for Python 3.12.
- uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -337,7 +569,7 @@ fn mixed_requires_python() -> Result<()> {
+ bird-feeder==0.1.0 (from file://[TEMP_DIR]/packages/bird-feeder)
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Running `uv sync` again should fail.
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.9"), @r"
@@ -660,23 +892,23 @@ fn check() -> Result<()> {
)?;
// Running `uv sync --check` should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--check"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
+ Would use project environment at: .venv
Resolved 2 packages in [TIME]
Would create lockfile at: uv.lock
Would download 1 package
Would install 1 package
+ iniconfig==2.0.0
error: The environment is outdated; run `uv sync` to update the environment
- "###);
+ ");
// Sync the environment.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -686,23 +918,23 @@ fn check() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
// Running `uv sync --check` should pass now that the environment is up to date.
- uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--check"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
+ Would use project environment at: .venv
Resolved 2 packages in [TIME]
Found up-to-date lockfile at: uv.lock
Audited 1 package in [TIME]
Would make no changes
- "###);
+ ");
Ok(())
}
@@ -750,7 +982,7 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> {
.touch()?;
// Syncing with `--no-dev` should omit all dependencies except `iniconfig`.
- uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -761,11 +993,11 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> {
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
- "###);
+ ");
// Syncing without `--no-dev` should include `anyio`, `requests`, `pysocks`, and their
// dependencies, but not `typing-extensions`.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -782,7 +1014,7 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> {
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
- "###);
+ ");
Ok(())
}
@@ -830,7 +1062,7 @@ fn sync_legacy_non_project_frozen() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--package").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--package").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -839,9 +1071,9 @@ fn sync_legacy_non_project_frozen() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -850,7 +1082,7 @@ fn sync_legacy_non_project_frozen() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -903,7 +1135,7 @@ fn sync_legacy_non_project_group() -> Result<()> {
.child("__init__.py")
.touch()?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -914,9 +1146,9 @@ fn sync_legacy_non_project_group() -> Result<()> {
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -928,9 +1160,9 @@ fn sync_legacy_non_project_group() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -946,9 +1178,9 @@ fn sync_legacy_non_project_group() -> Result<()> {
- iniconfig==2.0.0
- sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -958,9 +1190,9 @@ fn sync_legacy_non_project_group() -> Result<()> {
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bop"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bop"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -968,7 +1200,7 @@ fn sync_legacy_non_project_group() -> Result<()> {
----- stderr -----
Resolved 6 packages in [TIME]
error: Group `bop` is not defined in any project's `dependency-groups` table
- "###);
+ ");
Ok(())
}
@@ -993,7 +1225,7 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1004,7 +1236,7 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Modify the "live" dependency groups.
pyproject_toml.write_str(
@@ -1018,14 +1250,14 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> {
)?;
// This should succeed.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 3 packages in [TIME]
- "###);
+ ");
Ok(())
}
@@ -1074,7 +1306,7 @@ fn sync_build_isolation() -> Result<()> {
"###);
// Running `uv sync` should succeed.
- uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1092,7 +1324,7 @@ fn sync_build_isolation() -> Result<()> {
+ source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz)
- trove-classifiers==2024.3.3
- wheel==0.43.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -1122,7 +1354,7 @@ fn sync_build_isolation_package() -> Result<()> {
)?;
// Running `uv sync` should fail for iniconfig.
- uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -1140,7 +1372,7 @@ fn sync_build_isolation_package() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `source-distribution` was included because `project` (v0.1.0) depends on `source-distribution`
- "###);
+ "#);
// Install `hatchling` for `source-distribution`.
uv_snapshot!(context.filters(), context.pip_install().arg("hatchling"), @r###"
@@ -1160,7 +1392,7 @@ fn sync_build_isolation_package() -> Result<()> {
"###);
// Running `uv sync` should succeed.
- uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1177,7 +1409,7 @@ fn sync_build_isolation_package() -> Result<()> {
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz)
- trove-classifiers==2024.3.3
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -1212,7 +1444,7 @@ fn sync_build_isolation_extra() -> Result<()> {
)?;
// Running `uv sync` should fail for the `compile` extra.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -1230,10 +1462,10 @@ fn sync_build_isolation_extra() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `source-distribution` was included because `project[compile]` (v0.1.0) depends on `source-distribution`
- "###);
+ "#);
// Running `uv sync` with `--all-extras` should also fail.
- uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -1251,10 +1483,10 @@ fn sync_build_isolation_extra() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `source-distribution` was included because `project[compile]` (v0.1.0) depends on `source-distribution`
- "###);
+ "#);
// Install the build dependencies.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("build"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("build"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1269,10 +1501,10 @@ fn sync_build_isolation_extra() -> Result<()> {
+ pluggy==1.4.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ trove-classifiers==2024.3.3
- "###);
+ ");
// Running `uv sync` for the `compile` extra should succeed, and remove the build dependencies.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1288,7 +1520,7 @@ fn sync_build_isolation_extra() -> Result<()> {
- pluggy==1.4.0
+ source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz)
- trove-classifiers==2024.3.3
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -1342,7 +1574,7 @@ fn sync_reset_state() -> Result<()> {
init.touch()?;
// Running `uv sync` should succeed.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1354,7 +1586,7 @@ fn sync_reset_state() -> Result<()> {
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ pydantic-core==2.17.0
+ typing-extensions==4.10.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -1396,7 +1628,7 @@ fn sync_relative_wheel() -> Result<()> {
context.temp_dir.join("wheels/ok-1.0.0-py3-none-any.whl"),
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1407,7 +1639,7 @@ fn sync_relative_wheel() -> Result<()> {
Installed 2 packages in [TIME]
+ ok==1.0.0 (from file://[TEMP_DIR]/wheels/ok-1.0.0-py3-none-any.whl)
+ relative-wheel==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -1449,7 +1681,7 @@ fn sync_relative_wheel() -> Result<()> {
);
// Check that we can re-read the lockfile.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1457,7 +1689,7 @@ fn sync_relative_wheel() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 2 packages in [TIME]
- "###);
+ ");
Ok(())
}
@@ -1481,7 +1713,7 @@ fn sync_environment() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 2
----- stdout -----
@@ -1489,7 +1721,7 @@ fn sync_environment() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
error: The current Python platform is not compatible with the lockfile's supported environments: `python_full_version < '3.11'`
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -1516,7 +1748,7 @@ fn sync_dev() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--only-dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1528,9 +1760,9 @@ fn sync_dev() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1544,9 +1776,9 @@ fn sync_dev() -> Result<()> {
- idna==3.6
- sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1557,10 +1789,10 @@ fn sync_dev() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Using `--no-default-groups` should remove dev dependencies
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1571,7 +1803,7 @@ fn sync_dev() -> Result<()> {
- anyio==4.3.0
- idna==3.6
- sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -1600,7 +1832,7 @@ fn sync_group() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1611,9 +1843,9 @@ fn sync_group() -> Result<()> {
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1625,9 +1857,9 @@ fn sync_group() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1645,9 +1877,9 @@ fn sync_group() -> Result<()> {
- sniffio==1.3.1
- typing-extensions==4.10.0
+ urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1659,9 +1891,9 @@ fn sync_group() -> Result<()> {
+ iniconfig==2.0.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1669,9 +1901,9 @@ fn sync_group() -> Result<()> {
----- stderr -----
Resolved 10 packages in [TIME]
Audited 9 packages in [TIME]
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1683,9 +1915,9 @@ fn sync_group() -> Result<()> {
- charset-normalizer==3.3.2
- requests==2.31.0
- urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1699,9 +1931,9 @@ fn sync_group() -> Result<()> {
- iniconfig==2.0.0
+ requests==2.31.0
+ urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1718,9 +1950,9 @@ fn sync_group() -> Result<()> {
- requests==2.31.0
- sniffio==1.3.1
- urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--dev").arg("--no-group").arg("dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--dev").arg("--no-group").arg("dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1729,9 +1961,9 @@ fn sync_group() -> Result<()> {
Resolved 10 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--no-dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1739,9 +1971,9 @@ fn sync_group() -> Result<()> {
----- stderr -----
Resolved 10 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1757,10 +1989,10 @@ fn sync_group() -> Result<()> {
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
- "###);
+ ");
// Using `--no-default-groups` should exclude all groups
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1776,9 +2008,9 @@ fn sync_group() -> Result<()> {
- requests==2.31.0
- sniffio==1.3.1
- urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1794,11 +2026,11 @@ fn sync_group() -> Result<()> {
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
- "###);
+ ");
// Using `--no-default-groups` with `--group foo` and `--group bar` should include those groups,
// excluding the remaining `dev` group.
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1807,7 +2039,7 @@ fn sync_group() -> Result<()> {
Resolved 10 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -1833,7 +2065,7 @@ fn sync_include_group() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1843,9 +2075,9 @@ fn sync_include_group() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1858,9 +2090,9 @@ fn sync_include_group() -> Result<()> {
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1872,9 +2104,9 @@ fn sync_include_group() -> Result<()> {
- idna==3.6
- sniffio==1.3.1
- typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1886,9 +2118,9 @@ fn sync_include_group() -> Result<()> {
+ idna==3.6
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1897,9 +2129,9 @@ fn sync_include_group() -> Result<()> {
Resolved 6 packages in [TIME]
Uninstalled 1 package in [TIME]
- typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1908,9 +2140,9 @@ fn sync_include_group() -> Result<()> {
Resolved 6 packages in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1922,9 +2154,9 @@ fn sync_include_group() -> Result<()> {
- idna==3.6
- iniconfig==2.0.0
- sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1936,9 +2168,9 @@ fn sync_include_group() -> Result<()> {
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1946,7 +2178,7 @@ fn sync_include_group() -> Result<()> {
----- stderr -----
Resolved 6 packages in [TIME]
Audited 5 packages in [TIME]
- "###);
+ ");
Ok(())
}
@@ -1972,7 +2204,7 @@ fn sync_exclude_group() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1986,9 +2218,9 @@ fn sync_exclude_group() -> Result<()> {
+ iniconfig==2.0.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--no-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--no-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2000,9 +2232,9 @@ fn sync_exclude_group() -> Result<()> {
- idna==3.6
- iniconfig==2.0.0
- sniffio==1.3.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2013,9 +2245,9 @@ fn sync_exclude_group() -> Result<()> {
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar").arg("--no-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar").arg("--no-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2024,7 +2256,7 @@ fn sync_exclude_group() -> Result<()> {
Resolved 6 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -2052,7 +2284,7 @@ fn sync_dev_group() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2066,7 +2298,7 @@ fn sync_dev_group() -> Result<()> {
+ iniconfig==2.0.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -2093,7 +2325,7 @@ fn sync_non_existent_group() -> Result<()> {
context.lock().assert().success();
// Requesting a non-existent group should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -2101,9 +2333,9 @@ fn sync_non_existent_group() -> Result<()> {
----- stderr -----
Resolved 7 packages in [TIME]
error: Group `baz` is not defined in the project's `dependency-groups` table
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -2111,10 +2343,10 @@ fn sync_non_existent_group() -> Result<()> {
----- stderr -----
Resolved 7 packages in [TIME]
error: Group `baz` is not defined in the project's `dependency-groups` table
- "###);
+ ");
// Requesting an empty group should succeed.
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2124,11 +2356,11 @@ fn sync_non_existent_group() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.10.0
- "###);
+ ");
// Requesting with `--frozen` should respect the groups in the lockfile, rather than the
// `pyproject.toml`.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2141,7 +2373,7 @@ fn sync_non_existent_group() -> Result<()> {
+ idna==3.6
+ requests==2.31.0
+ urllib3==2.2.1
- "###);
+ ");
// Replace `bar` with `baz`.
pyproject_toml.write_str(
@@ -2157,23 +2389,23 @@ fn sync_non_existent_group() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 6 packages in [TIME]
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Group `baz` is not defined in the project's `dependency-groups` table
- "###);
+ ");
Ok(())
}
@@ -2453,7 +2685,7 @@ fn sync_default_groups() -> Result<()> {
context.lock().assert().success();
// The `dev` group should be synced by default.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2464,7 +2696,7 @@ fn sync_default_groups() -> Result<()> {
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ typing-extensions==4.10.0
- "###);
+ ");
// If we remove it from the `default-groups` list, it should be removed.
pyproject_toml.write_str(
@@ -2485,7 +2717,7 @@ fn sync_default_groups() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2494,7 +2726,7 @@ fn sync_default_groups() -> Result<()> {
Resolved 10 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
// If we set a different default group, it should be synced instead.
pyproject_toml.write_str(
@@ -2515,7 +2747,7 @@ fn sync_default_groups() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2527,7 +2759,7 @@ fn sync_default_groups() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// `--no-group` should remove from the defaults.
pyproject_toml.write_str(
@@ -2548,7 +2780,7 @@ fn sync_default_groups() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2559,10 +2791,10 @@ fn sync_default_groups() -> Result<()> {
- anyio==4.3.0
- idna==3.6
- sniffio==1.3.1
- "###);
+ ");
// Using `--group` should include the defaults
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2574,10 +2806,10 @@ fn sync_default_groups() -> Result<()> {
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
// Using `--all-groups` should include the defaults
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2590,10 +2822,10 @@ fn sync_default_groups() -> Result<()> {
+ charset-normalizer==3.3.2
+ requests==2.31.0
+ urllib3==2.2.1
- "###);
+ ");
// Using `--only-group` should exclude the defaults
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2609,9 +2841,9 @@ fn sync_default_groups() -> Result<()> {
- sniffio==1.3.1
- typing-extensions==4.10.0
- urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2627,10 +2859,10 @@ fn sync_default_groups() -> Result<()> {
+ sniffio==1.3.1
+ typing-extensions==4.10.0
+ urllib3==2.2.1
- "###);
+ ");
// Using `--no-default-groups` should exclude all groups
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2646,9 +2878,9 @@ fn sync_default_groups() -> Result<()> {
- requests==2.31.0
- sniffio==1.3.1
- urllib3==2.2.1
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2664,11 +2896,11 @@ fn sync_default_groups() -> Result<()> {
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
- "###);
+ ");
// Using `--no-default-groups` with `--group foo` and `--group bar` should include those groups,
// excluding the remaining `dev` group.
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2677,7 +2909,7 @@ fn sync_default_groups() -> Result<()> {
Resolved 10 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -2749,7 +2981,7 @@ fn sync_default_groups_all() -> Result<()> {
");
// Using `--all-groups` should be redundant and work fine
- uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2765,7 +2997,7 @@ fn sync_default_groups_all() -> Result<()> {
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
- "###);
+ ");
// Using `--no-dev` should exclude just the dev group
uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r"
@@ -2900,7 +3132,7 @@ fn sync_group_member() -> Result<()> {
// Generate a lockfile.
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -2912,7 +3144,7 @@ fn sync_group_member() -> Result<()> {
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -3023,7 +3255,7 @@ fn sync_group_legacy_non_project_member() -> Result<()> {
);
});
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3035,7 +3267,7 @@ fn sync_group_legacy_non_project_member() -> Result<()> {
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -3157,7 +3389,7 @@ fn sync_group_self() -> Result<()> {
);
});
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3169,9 +3401,9 @@ fn sync_group_self() -> Result<()> {
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ typing-extensions==4.10.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3183,7 +3415,7 @@ fn sync_group_self() -> Result<()> {
Installed 1 package in [TIME]
+ idna==3.6
- typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -3208,7 +3440,7 @@ fn sync_non_existent_extra() -> Result<()> {
context.lock().assert().success();
// Requesting a non-existent extra should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3216,10 +3448,10 @@ fn sync_non_existent_extra() -> Result<()> {
----- stderr -----
Resolved 4 packages in [TIME]
error: Extra `baz` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
// Excluding a non-existing extra when requesting all extras should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3227,7 +3459,7 @@ fn sync_non_existent_extra() -> Result<()> {
----- stderr -----
Resolved 4 packages in [TIME]
error: Extra `baz` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
Ok(())
}
@@ -3249,7 +3481,7 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> {
context.lock().assert().success();
// Requesting a non-existent extra should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3257,10 +3489,10 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> {
----- stderr -----
Resolved 1 package in [TIME]
error: Extra `baz` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
// Excluding a non-existing extra when requesting all extras should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3268,7 +3500,7 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> {
----- stderr -----
Resolved 1 package in [TIME]
error: Extra `baz` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
Ok(())
}
@@ -3321,14 +3553,14 @@ fn sync_ignore_extras_check_when_no_provides_extras() -> Result<()> {
"#})?;
// Requesting a non-existent extra should not fail, as no validation should be performed.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra").arg("baz"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited in [TIME]
- "###);
+ ");
Ok(())
}
@@ -3376,7 +3608,7 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> {
context.lock().assert().success();
// Requesting an extra that only exists in the child should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3384,10 +3616,10 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> {
----- stderr -----
Resolved 5 packages in [TIME]
error: Extra `async` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
// Unless we sync from the child directory.
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3399,7 +3631,7 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -3449,7 +3681,7 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> {
// Requesting an extra that only exists in the child should succeed, since we sync all members
// by default.
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3461,10 +3693,10 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Syncing from the child should also succeed.
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3472,10 +3704,10 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> {
----- stderr -----
Resolved 5 packages in [TIME]
Audited 3 packages in [TIME]
- "###);
+ ");
// Syncing from an unrelated child should fail.
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("other").arg("--extra").arg("async"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("other").arg("--extra").arg("async"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3483,7 +3715,7 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> {
----- stderr -----
Resolved 5 packages in [TIME]
error: Extra `async` is not defined in the project's `optional-dependencies` table
- "###);
+ ");
Ok(())
}
@@ -3551,7 +3783,7 @@ fn no_install_project() -> Result<()> {
context.lock().assert().success();
// Running with `--no-install-project` should install `anyio`, but not `project`.
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-project"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-project"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3563,7 +3795,7 @@ fn no_install_project() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// However, we do require the `pyproject.toml`.
fs_err::remove_file(pyproject_toml)?;
@@ -3633,7 +3865,7 @@ fn no_install_workspace() -> Result<()> {
// Running with `--no-install-workspace` should install `anyio` and `iniconfig`, but not
// `project` or `child`.
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3646,7 +3878,7 @@ fn no_install_workspace() -> Result<()> {
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
// Remove the virtual environment.
fs_err::remove_dir_all(&context.venv)?;
@@ -3654,7 +3886,7 @@ fn no_install_workspace() -> Result<()> {
// We don't require the `pyproject.toml` for non-root members, if `--frozen` is provided.
fs_err::remove_file(child.join("pyproject.toml"))?;
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace").arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace").arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3667,10 +3899,10 @@ fn no_install_workspace() -> Result<()> {
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
// Even if `--package` is used.
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--no-install-workspace").arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--no-install-workspace").arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3680,20 +3912,20 @@ fn no_install_workspace() -> Result<()> {
- anyio==3.7.0
- idna==3.6
- sniffio==1.3.1
- "###);
+ ");
// Unless the package doesn't exist.
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("fake").arg("--no-install-workspace").arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("fake").arg("--no-install-workspace").arg("--frozen"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Could not find root package `fake`
- "###);
+ ");
// Even if `--all-packages` is used.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--no-install-workspace").arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--no-install-workspace").arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3703,7 +3935,7 @@ fn no_install_workspace() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// But we do require the root `pyproject.toml`.
fs_err::remove_file(context.temp_dir.join("pyproject.toml"))?;
@@ -3744,7 +3976,7 @@ fn no_install_package() -> Result<()> {
context.lock().assert().success();
// Running with `--no-install-package anyio` should skip anyio but include everything else
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("anyio"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("anyio"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3756,11 +3988,11 @@ fn no_install_package() -> Result<()> {
+ idna==3.6
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ sniffio==1.3.1
- "###);
+ ");
// Running with `--no-install-package project` should skip the project itself (not as a special
// case, that's just the name of the project)
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("project"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("project"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3772,7 +4004,7 @@ fn no_install_package() -> Result<()> {
Installed 1 package in [TIME]
+ anyio==3.7.0
- project==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
Ok(())
}
@@ -3801,7 +4033,7 @@ fn no_install_project_no_build() -> Result<()> {
context.lock().assert().success();
// `--no-build` should raise an error, since we try to install the project.
- uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -3809,11 +4041,11 @@ fn no_install_project_no_build() -> Result<()> {
----- stderr -----
Resolved 4 packages in [TIME]
error: Distribution `project==0.1.0 @ editable+.` can't be installed because it is marked as `--no-build` but has no binary distribution
- "###);
+ ");
// But it's fine to combine `--no-install-project` with `--no-build`. We shouldn't error, since
// we aren't building the project.
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build").arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build").arg("--locked"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3825,7 +4057,7 @@ fn no_install_project_no_build() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -3978,7 +4210,7 @@ fn convert_to_virtual() -> Result<()> {
)?;
// Running `uv sync` should install the project itself.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -3989,7 +4221,7 @@ fn convert_to_virtual() -> Result<()> {
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -4040,7 +4272,7 @@ fn convert_to_virtual() -> Result<()> {
)?;
// Running `uv sync` should remove the project itself.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4049,7 +4281,7 @@ fn convert_to_virtual() -> Result<()> {
Resolved 2 packages in [TIME]
Uninstalled 1 package in [TIME]
- project==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -4108,7 +4340,7 @@ fn convert_to_package() -> Result<()> {
)?;
// Running `uv sync` should not install the project itself.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4118,7 +4350,7 @@ fn convert_to_package() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -4173,7 +4405,7 @@ fn convert_to_package() -> Result<()> {
)?;
// Running `uv sync` should install the project itself.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4183,7 +4415,7 @@ fn convert_to_package() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ project==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -4243,7 +4475,7 @@ fn sync_custom_environment_path() -> Result<()> {
)?;
// Running `uv sync` should create `.venv` by default
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4255,7 +4487,7 @@ fn sync_custom_environment_path() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4263,7 +4495,7 @@ fn sync_custom_environment_path() -> Result<()> {
.assert(predicate::path::is_dir());
// Running `uv sync` should create `foo` in the project directory when customized
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4274,7 +4506,7 @@ fn sync_custom_environment_path() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4288,7 +4520,7 @@ fn sync_custom_environment_path() -> Result<()> {
.assert(predicate::path::is_dir());
// An absolute path can be provided
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foobar/.venv"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foobar/.venv"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4299,7 +4531,7 @@ fn sync_custom_environment_path() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4313,7 +4545,7 @@ fn sync_custom_environment_path() -> Result<()> {
.assert(predicate::path::is_dir());
// An absolute path can be provided
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, context.temp_dir.join("bar")), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, context.temp_dir.join("bar")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4324,7 +4556,7 @@ fn sync_custom_environment_path() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4334,7 +4566,7 @@ fn sync_custom_environment_path() -> Result<()> {
// And, it can be outside the project
let tempdir = tempdir_in(TestContext::test_bucket_dir())?;
context = context.with_filtered_path(tempdir.path(), "OTHER_TEMPDIR");
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, tempdir.path().join(".venv")), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, tempdir.path().join(".venv")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4345,7 +4577,7 @@ fn sync_custom_environment_path() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
ChildPath::new(tempdir.path())
.child(".venv")
@@ -4382,7 +4614,7 @@ fn sync_custom_environment_path() -> Result<()> {
fs_err::write(context.temp_dir.join("foo").join("file"), b"")?;
// We can delete and use it
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4394,7 +4626,7 @@ fn sync_custom_environment_path() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -4417,7 +4649,7 @@ fn sync_active_project_environment() -> Result<()> {
)?;
// Running `uv sync` with `VIRTUAL_ENV` should warn
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4430,7 +4662,7 @@ fn sync_active_project_environment() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4443,7 +4675,7 @@ fn sync_active_project_environment() -> Result<()> {
.assert(predicate::path::missing());
// Using `--active` should create the environment
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4454,7 +4686,7 @@ fn sync_active_project_environment() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4462,7 +4694,7 @@ fn sync_active_project_environment() -> Result<()> {
.assert(predicate::path::is_dir());
// A subsequent sync will re-use the environment
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4470,13 +4702,13 @@ fn sync_active_project_environment() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// Setting both the `VIRTUAL_ENV` and `UV_PROJECT_ENVIRONMENT` is fine if they agree
uv_snapshot!(context.filters(), context.sync()
.arg("--active")
.env(EnvVars::VIRTUAL_ENV, "foo")
- .env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ .env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4484,13 +4716,13 @@ fn sync_active_project_environment() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// If they disagree, we use `VIRTUAL_ENV` because of `--active`
uv_snapshot!(context.filters(), context.sync()
.arg("--active")
.env(EnvVars::VIRTUAL_ENV, "foo")
- .env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r###"
+ .env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4498,7 +4730,7 @@ fn sync_active_project_environment() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
context
.temp_dir
@@ -4507,7 +4739,7 @@ fn sync_active_project_environment() -> Result<()> {
// Requesting another Python version will invalidate the environment
uv_snapshot!(context.filters(), context.sync()
- .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active").arg("-p").arg("3.12"), @r###"
+ .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active").arg("-p").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4519,7 +4751,7 @@ fn sync_active_project_environment() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -4553,7 +4785,7 @@ fn sync_active_script_environment() -> Result<()> {
.collect::<Vec<_>>();
// Running `uv sync --script` with `VIRTUAL_ENV` should warn
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4567,7 +4799,7 @@ fn sync_active_script_environment() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
context
.temp_dir
@@ -4575,7 +4807,7 @@ fn sync_active_script_environment() -> Result<()> {
.assert(predicate::path::missing());
// Using `--active` should create the environment
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4587,7 +4819,7 @@ fn sync_active_script_environment() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
context
.temp_dir
@@ -4595,7 +4827,7 @@ fn sync_active_script_environment() -> Result<()> {
.assert(predicate::path::is_dir());
// A subsequent sync will re-use the environment
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4604,7 +4836,7 @@ fn sync_active_script_environment() -> Result<()> {
Using script environment at: foo
Resolved 3 packages in [TIME]
Audited 3 packages in [TIME]
- "###);
+ ");
// Requesting another Python version will invalidate the environment
uv_snapshot!(&filters, context.sync()
@@ -4613,19 +4845,198 @@ fn sync_active_script_environment() -> Result<()> {
.env(EnvVars::VIRTUAL_ENV, "foo")
.arg("--active")
.arg("-p")
- .arg("3.12"), @r###"
+ .arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Recreating script environment at: foo
+ Updating script environment at: foo
Resolved 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
+
+ Ok(())
+}
+
+#[test]
+fn sync_active_script_environment_json() -> Result<()> {
+ let context = TestContext::new_with_versions(&["3.11", "3.12"])
+ .with_filtered_virtualenv_bin()
+ .with_filtered_python_names();
+
+ let script = context.temp_dir.child("script.py");
+ script.write_str(indoc! { r#"
+ # /// script
+ # requires-python = ">=3.11"
+ # dependencies = [
+ # "anyio",
+ # ]
+ # ///
+
+ import anyio
+ "#
+ })?;
+
+ let filters = context
+ .filters()
+ .into_iter()
+ .chain(vec![
+ (
+ r"environments-v2/script-[a-z0-9]+",
+ "environments-v2/script-[HASH]",
+ ),
+ ("bin/python3", "[PYTHON]"),
+ ("Scripts/python.exe", "[PYTHON]"),
+ ])
+ .collect::<Vec<_>>();
+
+ // Running `uv sync --script` with `VIRTUAL_ENV` should warn
+ uv_snapshot!(&filters, context.sync()
+ .arg("--script").arg("script.py")
+ .arg("--output-format").arg("json")
+ .env(EnvVars::VIRTUAL_ENV, "foo"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "script",
+ "script": {
+ "path": "[TEMP_DIR]/script.py"
+ },
+ "sync": {
+ "environment": {
+ "path": "[CACHE_DIR]/environments-v2/script-[HASH]",
+ "python": {
+ "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/python",
+ "version": "3.11.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "create"
+ },
+ "lock": null,
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ warning: `VIRTUAL_ENV=foo` does not match the script environment path `[CACHE_DIR]/environments-v2/script-[HASH]` and will be ignored; use `--active` to target the active environment instead
+ Resolved 3 packages in [TIME]
+ Prepared 3 packages in [TIME]
+ Installed 3 packages in [TIME]
+ + anyio==4.3.0
+ + idna==3.6
+ + sniffio==1.3.1
+ "#);
+
+ context
+ .temp_dir
+ .child("foo")
+ .assert(predicate::path::missing());
+
+ // Using `--active` should create the environment
+ uv_snapshot!(&filters, context.sync()
+ .arg("--script").arg("script.py")
+ .arg("--output-format").arg("json")
+ .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "script",
+ "script": {
+ "path": "[TEMP_DIR]/script.py"
+ },
+ "sync": {
+ "environment": {
+ "path": "[TEMP_DIR]/foo",
+ "python": {
+ "path": "[TEMP_DIR]/foo/[BIN]/python",
+ "version": "3.11.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "create"
+ },
+ "lock": null,
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Installed 3 packages in [TIME]
+ + anyio==4.3.0
+ + idna==3.6
+ + sniffio==1.3.1
+ "#);
+
+ context
+ .temp_dir
+ .child("foo")
+ .assert(predicate::path::is_dir());
+
+ // A subsequent sync will re-use the environment
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using script environment at: foo
+ Resolved 3 packages in [TIME]
+ Audited 3 packages in [TIME]
+ ");
+
+ // Requesting another Python version will invalidate the environment
+ uv_snapshot!(&filters, context.sync()
+ .arg("--script").arg("script.py")
+ .arg("--output-format").arg("json")
+ .env(EnvVars::VIRTUAL_ENV, "foo")
+ .arg("--active")
+ .arg("-p")
+ .arg("3.12"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ {
+ "schema": {
+ "version": "preview"
+ },
+ "target": "script",
+ "script": {
+ "path": "[TEMP_DIR]/script.py"
+ },
+ "sync": {
+ "environment": {
+ "path": "[TEMP_DIR]/foo",
+ "python": {
+ "path": "[TEMP_DIR]/foo/[BIN]/python",
+ "version": "3.12.[X]",
+ "implementation": "cpython"
+ }
+ },
+ "action": "update"
+ },
+ "lock": null,
+ "dry_run": false
+ }
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Installed 3 packages in [TIME]
+ + anyio==4.3.0
+ + idna==3.6
+ + sniffio==1.3.1
+ "#);
Ok(())
}
@@ -4650,7 +5061,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
context.init().arg("child").assert().success();
// Running `uv sync` should create `.venv` in the workspace root
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4660,7 +5071,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4668,7 +5079,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
.assert(predicate::path::is_dir());
// Similarly, `uv sync` from the child project uses `.venv` in the workspace root
- uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.join("child")), @r###"
+ uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.join("child")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4677,7 +5088,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4691,7 +5102,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
.assert(predicate::path::missing());
// Running `uv sync` should create `foo` in the workspace root when customized
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4702,7 +5113,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
Resolved 3 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4716,7 +5127,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
.assert(predicate::path::is_dir());
// Similarly, `uv sync` from the child project uses `foo` relative to the workspace root
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(context.temp_dir.join("child")), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(context.temp_dir.join("child")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4725,7 +5136,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
context
.temp_dir
@@ -4739,7 +5150,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
.assert(predicate::path::missing());
// And, `uv sync --package child` uses `foo` relative to the workspace root
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4747,7 +5158,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> {
----- stderr -----
Resolved 3 packages in [TIME]
Audited in [TIME]
- "###);
+ ");
context
.temp_dir
@@ -4782,7 +5193,7 @@ fn sync_empty_virtual_environment() -> Result<()> {
)?;
// Running `uv sync` should work
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4794,7 +5205,7 @@ fn sync_empty_virtual_environment() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -4816,7 +5227,7 @@ fn sync_legacy_non_project_warning() -> Result<()> {
)?;
// We should not warn if it matches the project environment
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join(".venv")), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join(".venv")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4826,10 +5237,10 @@ fn sync_legacy_non_project_warning() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Including if it's a relative path that matches
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, ".venv"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, ".venv"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4837,7 +5248,7 @@ fn sync_legacy_non_project_warning() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// Or, if it's a link that resolves to the same path
#[cfg(unix)]
@@ -4847,7 +5258,7 @@ fn sync_legacy_non_project_warning() -> Result<()> {
let link = context.temp_dir.join("link");
symlink(context.temp_dir.join(".venv"), &link)?;
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, link), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, link), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4855,11 +5266,11 @@ fn sync_legacy_non_project_warning() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
}
// But we should warn if it's a different path
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4868,10 +5279,10 @@ fn sync_legacy_non_project_warning() -> Result<()> {
warning: `VIRTUAL_ENV=foo` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// Including absolute paths
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4880,10 +5291,10 @@ fn sync_legacy_non_project_warning() -> Result<()> {
warning: `VIRTUAL_ENV=foo` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// We should not warn if the project environment has been customized and matches
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4894,10 +5305,10 @@ fn sync_legacy_non_project_warning() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// But we should warn if they don't match still
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4909,14 +5320,14 @@ fn sync_legacy_non_project_warning() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
let child = context.temp_dir.child("child");
child.create_dir_all()?;
// And `VIRTUAL_ENV` is resolved relative to the project root so with relative paths we should
// warn from a child too
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4925,10 +5336,10 @@ fn sync_legacy_non_project_warning() -> Result<()> {
warning: `VIRTUAL_ENV=foo` does not match the project environment path `[TEMP_DIR]/foo` and will be ignored; use `--active` to target the active environment instead
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
// But, a matching absolute path shouldn't warn
- uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")).env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r###"
+ uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")).env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4936,7 +5347,7 @@ fn sync_legacy_non_project_warning() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
Ok(())
}
@@ -4956,7 +5367,7 @@ fn sync_update_project() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4968,7 +5379,7 @@ fn sync_update_project() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Bump the project version.
pyproject_toml.write_str(
@@ -4985,7 +5396,7 @@ fn sync_update_project() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -4995,7 +5406,7 @@ fn sync_update_project() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ my-project==0.2.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
Ok(())
}
@@ -5016,7 +5427,7 @@ fn sync_environment_prompt() -> Result<()> {
)?;
// Running `uv sync` should create `.venv`
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5028,7 +5439,7 @@ fn sync_environment_prompt() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// The `pyvenv.cfg` should contain the prompt matching the project name
let pyvenv_cfg = context.read(".venv/pyvenv.cfg");
@@ -5055,7 +5466,7 @@ fn no_binary() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("iniconfig"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("iniconfig"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5065,11 +5476,11 @@ fn no_binary() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
- uv_snapshot!(context.filters(), context.sync().arg("--reinstall").arg("--no-binary"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--reinstall").arg("--no-binary"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5080,9 +5491,9 @@ fn no_binary() -> Result<()> {
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY_PACKAGE", "iniconfig"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY_PACKAGE", "iniconfig"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5093,9 +5504,9 @@ fn no_binary() -> Result<()> {
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "1"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "1"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5106,7 +5517,7 @@ fn no_binary() -> Result<()> {
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ iniconfig==2.0.0
- "###);
+ ");
uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "iniconfig"), @r###"
success: false
@@ -5139,7 +5550,7 @@ fn no_binary_error() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("odrive"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("odrive"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -5147,7 +5558,7 @@ fn no_binary_error() -> Result<()> {
----- stderr -----
Resolved 31 packages in [TIME]
error: Distribution `odrive==0.6.8 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-binary` but has no source distribution
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
@@ -5171,7 +5582,7 @@ fn no_build() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("iniconfig"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("iniconfig"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5181,11 +5592,11 @@ fn no_build() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
- uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "iniconfig"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "iniconfig"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5196,7 +5607,7 @@ fn no_build() -> Result<()> {
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ iniconfig==2.0.0
- "###);
+ ");
Ok(())
}
@@ -5218,7 +5629,7 @@ fn no_build_error() -> Result<()> {
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -5226,7 +5637,7 @@ fn no_build_error() -> Result<()> {
----- stderr -----
Resolved 19 packages in [TIME]
error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution
- "###);
+ ");
uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r"
success: false
@@ -5248,7 +5659,7 @@ fn no_build_error() -> Result<()> {
error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution
");
- uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "django-allauth"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "django-allauth"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -5256,7 +5667,7 @@ fn no_build_error() -> Result<()> {
----- stderr -----
Resolved 19 packages in [TIME]
error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution
- "###);
+ ");
uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD", "django-allauth"), @r###"
success: false
@@ -5300,7 +5711,7 @@ fn sync_wheel_url_source_error() -> Result<()> {
Resolved 3 packages in [TIME]
"###);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 2
----- stdout -----
@@ -5310,7 +5721,7 @@ fn sync_wheel_url_source_error() -> Result<()> {
error: Distribution `cffi==1.17.1 @ direct+https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl` can't be installed because the binary distribution is incompatible with the current platform
hint: You're using CPython 3.12 (`cp312`), but `cffi` (v1.17.1) only has wheels with the following Python ABI tag: `cp310`
- "###);
+ ");
Ok(())
}
@@ -5351,7 +5762,7 @@ fn sync_wheel_path_source_error() -> Result<()> {
Resolved 3 packages in [TIME]
"###);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 2
----- stdout -----
@@ -5361,7 +5772,7 @@ fn sync_wheel_path_source_error() -> Result<()> {
error: Distribution `cffi==1.17.1 @ path+cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl` can't be installed because the binary distribution is incompatible with the current platform
hint: You're using CPython 3.12 (`cp312`), but `cffi` (v1.17.1) only has wheels with the following Python ABI tag: `cp310`
- "###);
+ ");
Ok(())
}
@@ -5423,7 +5834,7 @@ fn sync_override_package() -> Result<()> {
.touch()?;
// Syncing the project should _not_ install `core`.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5433,7 +5844,7 @@ fn sync_override_package() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ project==0.0.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
// Mark the source as `package = true`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
@@ -5455,7 +5866,7 @@ fn sync_override_package() -> Result<()> {
)?;
// Syncing the project _should_ install `core`.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5467,7 +5878,7 @@ fn sync_override_package() -> Result<()> {
Installed 2 packages in [TIME]
+ core==0.1.0 (from file://[TEMP_DIR]/core)
~ project==0.0.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
// Remove `package = false`.
let pyproject_toml = context.temp_dir.child("core").child("pyproject.toml");
@@ -5485,7 +5896,7 @@ fn sync_override_package() -> Result<()> {
)?;
// Syncing the project _should_ install `core`.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5496,7 +5907,7 @@ fn sync_override_package() -> Result<()> {
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ core==0.1.0 (from file://[TEMP_DIR]/core)
- "###);
+ ");
// Mark the source as `package = false`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
@@ -5518,7 +5929,7 @@ fn sync_override_package() -> Result<()> {
)?;
// Syncing the project should _not_ install `core`.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5530,7 +5941,7 @@ fn sync_override_package() -> Result<()> {
Installed 1 package in [TIME]
- core==0.1.0 (from file://[TEMP_DIR]/core)
~ project==0.0.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
Ok(())
}
@@ -5592,7 +6003,7 @@ fn transitive_dev() -> Result<()> {
let init = src.child("__init__.py");
init.touch()?;
- uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5605,7 +6016,7 @@ fn transitive_dev() -> Result<()> {
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -5665,7 +6076,7 @@ fn sync_no_editable() -> Result<()> {
let init = src.child("__init__.py");
init.touch()?;
- uv_snapshot!(context.filters(), context.sync().arg("--no-editable"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-editable"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5676,7 +6087,7 @@ fn sync_no_editable() -> Result<()> {
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ root==0.1.0 (from file://[TEMP_DIR]/)
- "###);
+ ");
uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_NO_EDITABLE, "1"), @r"
success: true
@@ -5731,7 +6142,7 @@ fn sync_scripts_without_build_system() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5740,7 +6151,7 @@ fn sync_scripts_without_build_system() -> Result<()> {
warning: Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system`
Resolved 1 package in [TIME]
Audited in [TIME]
- "###);
+ ");
Ok(())
}
@@ -5780,7 +6191,7 @@ fn sync_scripts_project_not_packaged() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5789,7 +6200,7 @@ fn sync_scripts_project_not_packaged() -> Result<()> {
warning: Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system`
Resolved 1 package in [TIME]
Audited in [TIME]
- "###);
+ ");
Ok(())
}
@@ -5822,7 +6233,7 @@ fn sync_dynamic_extra() -> Result<()> {
.child("requirements-dev.txt")
.write_str("typing-extensions")?;
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5834,7 +6245,7 @@ fn sync_dynamic_extra() -> Result<()> {
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ typing-extensions==4.10.0
- "###);
+ ");
let lock = context.read("uv.lock");
@@ -5895,7 +6306,7 @@ fn sync_dynamic_extra() -> Result<()> {
);
// Check that we can re-read the lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5904,7 +6315,7 @@ fn sync_dynamic_extra() -> Result<()> {
Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME]
- typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -5974,7 +6385,7 @@ fn build_system_requires_workspace() -> Result<()> {
",
})?;
- uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###"
+ uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -5987,7 +6398,7 @@ fn build_system_requires_workspace() -> Result<()> {
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/project)
- "###);
+ ");
Ok(())
}
@@ -6054,7 +6465,7 @@ fn build_system_requires_path() -> Result<()> {
",
})?;
- uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###"
+ uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6067,7 +6478,7 @@ fn build_system_requires_path() -> Result<()> {
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/project)
- "###);
+ ");
Ok(())
}
@@ -6119,7 +6530,7 @@ fn sync_invalid_environment() -> Result<()> {
fs_err::write(context.temp_dir.join(".venv").join("file"), b"")?;
// We can delete and use it
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6132,7 +6543,7 @@ fn sync_invalid_environment() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
let bin = venv_bin_path(context.temp_dir.join(".venv"));
@@ -6141,7 +6552,7 @@ fn sync_invalid_environment() -> Result<()> {
{
fs_err::remove_file(bin.join("python"))?;
fs_err::os::unix::fs::symlink(context.temp_dir.join("does-not-exist"), bin.join("python"))?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6154,7 +6565,7 @@ fn sync_invalid_environment() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
}
// But if the Python executable is missing entirely we should also fail
@@ -6242,7 +6653,7 @@ fn sync_no_sources_missing_member() -> Result<()> {
let init = src.child("__init__.py");
init.touch()?;
- uv_snapshot!(context.filters(), context.sync().arg("--no-sources"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--no-sources"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6254,7 +6665,7 @@ fn sync_no_sources_missing_member() -> Result<()> {
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -6273,7 +6684,7 @@ fn sync_python_version() -> Result<()> {
"#})?;
// We should respect the project's required version, not the first on the path
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6287,7 +6698,7 @@ fn sync_python_version() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Unless explicitly requested...
uv_snapshot!(context.filters(), context.sync().arg("--python").arg("3.10"), @r"
@@ -6310,7 +6721,7 @@ fn sync_python_version() -> Result<()> {
----- stderr -----
"###);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6324,7 +6735,7 @@ fn sync_python_version() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Create a pin that's incompatible with the project
uv_snapshot!(context.filters(), context.python_pin().arg("3.10").arg("--no-workspace"), @r###"
@@ -6363,7 +6774,7 @@ fn sync_python_version() -> Result<()> {
"#})
.unwrap();
- uv_snapshot!(context.filters(), context.sync().current_dir(&child_dir), @r###"
+ uv_snapshot!(context.filters(), context.sync().current_dir(&child_dir), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6376,7 +6787,7 @@ fn sync_python_version() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -6406,7 +6817,7 @@ fn sync_explicit() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6416,13 +6827,13 @@ fn sync_explicit() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ idna==2.7
- "###);
+ ");
// Clear the environment.
fs_err::remove_dir_all(&context.venv)?;
// The package should be drawn from the cache.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6433,7 +6844,7 @@ fn sync_explicit() -> Result<()> {
Resolved 2 packages in [TIME]
Installed 1 package in [TIME]
+ idna==2.7
- "###);
+ ");
Ok(())
}
@@ -6495,7 +6906,7 @@ fn sync_all() -> Result<()> {
context.lock().assert().success();
// Sync all workspace members.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6510,7 +6921,7 @@ fn sync_all() -> Result<()> {
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -6576,7 +6987,7 @@ fn sync_all_extras() -> Result<()> {
context.lock().assert().success();
// Sync an extra that exists in both the parent and child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6589,10 +7000,10 @@ fn sync_all_extras() -> Result<()> {
+ iniconfig==2.0.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
// Sync an extra that only exists in the child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("testing"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("testing"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6605,10 +7016,10 @@ fn sync_all_extras() -> Result<()> {
+ packaging==24.0
- sniffio==1.3.1
- typing-extensions==4.10.0
- "###);
+ ");
// Sync all extras.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6621,10 +7032,10 @@ fn sync_all_extras() -> Result<()> {
+ idna==3.6
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
// Sync all extras excluding an extra that exists in both the parent and child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("types"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("types"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6633,10 +7044,10 @@ fn sync_all_extras() -> Result<()> {
Resolved 8 packages in [TIME]
Uninstalled 1 package in [TIME]
- typing-extensions==4.10.0
- "###);
+ ");
// Sync an extra that doesn't exist.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -6644,10 +7055,10 @@ fn sync_all_extras() -> Result<()> {
----- stderr -----
Resolved 8 packages in [TIME]
error: Extra `foo` is not defined in any project's `optional-dependencies` table
- "###);
+ ");
// Sync all extras excluding an extra that doesn't exist.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("foo"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -6655,7 +7066,7 @@ fn sync_all_extras() -> Result<()> {
----- stderr -----
Resolved 8 packages in [TIME]
error: Extra `foo` is not defined in any project's `optional-dependencies` table
- "###);
+ ");
Ok(())
}
@@ -6731,7 +7142,7 @@ fn sync_all_extras_dynamic() -> Result<()> {
context.lock().assert().success();
// Sync an extra that exists in the parent.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6743,10 +7154,10 @@ fn sync_all_extras_dynamic() -> Result<()> {
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ sniffio==1.3.1
- "###);
+ ");
// Sync a dynamic extra that exists in the child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("dev"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("dev"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6758,10 +7169,10 @@ fn sync_all_extras_dynamic() -> Result<()> {
Installed 1 package in [TIME]
- sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
// Sync a dynamic extra that doesn't exist in the child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -6769,7 +7180,7 @@ fn sync_all_extras_dynamic() -> Result<()> {
----- stderr -----
Resolved 6 packages in [TIME]
error: Extra `foo` is not defined in any project's `optional-dependencies` table
- "###);
+ ");
Ok(())
}
@@ -6836,7 +7247,7 @@ fn sync_all_groups() -> Result<()> {
context.lock().assert().success();
// Sync a group that exists in both the parent and child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("types"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("types"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6849,10 +7260,10 @@ fn sync_all_groups() -> Result<()> {
+ iniconfig==2.0.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
// Sync a group that only exists in the child.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("testing"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("testing"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6865,10 +7276,10 @@ fn sync_all_groups() -> Result<()> {
+ packaging==24.0
- sniffio==1.3.1
- typing-extensions==4.10.0
- "###);
+ ");
// Sync a group that doesn't exist.
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("foo"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("foo"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -6876,10 +7287,10 @@ fn sync_all_groups() -> Result<()> {
----- stderr -----
Resolved 8 packages in [TIME]
error: Group `foo` is not defined in any project's `dependency-groups` table
- "###);
+ ");
// Sync an empty group.
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("empty"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("empty"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6888,7 +7299,7 @@ fn sync_all_groups() -> Result<()> {
Resolved 8 packages in [TIME]
Uninstalled 1 package in [TIME]
- packaging==24.0
- "###);
+ ");
Ok(())
}
@@ -6940,7 +7351,7 @@ fn sync_multiple_sources_index_disjoint_extras() -> Result<()> {
// Generate a lockfile.
context.lock().assert().success();
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("cu124"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("cu124"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -6951,7 +7362,7 @@ fn sync_multiple_sources_index_disjoint_extras() -> Result<()> {
Installed 2 packages in [TIME]
+ jinja2==3.1.3
+ markupsafe==2.1.5
- "###);
+ ");
Ok(())
}
@@ -6982,7 +7393,7 @@ fn sync_derivation_chain() -> Result<()> {
.chain([(r"/.*/src", "/[TMP]/src")])
.collect::<Vec<_>>();
- uv_snapshot!(filters, context.sync(), @r###"
+ uv_snapshot!(filters, context.sync(), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7013,7 +7424,7 @@ fn sync_derivation_chain() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `wsgiref` (v0.1.2) was included because `project` (v0.1.0) depends on `wsgiref`
- "###);
+ "#);
Ok(())
}
@@ -7045,7 +7456,7 @@ fn sync_derivation_chain_extra() -> Result<()> {
.chain([(r"/.*/src", "/[TMP]/src")])
.collect::<Vec<_>>();
- uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###"
+ uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7076,7 +7487,7 @@ fn sync_derivation_chain_extra() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `wsgiref` (v0.1.2) was included because `project[wsgi]` (v0.1.0) depends on `wsgiref`
- "###);
+ "#);
Ok(())
}
@@ -7110,7 +7521,7 @@ fn sync_derivation_chain_group() -> Result<()> {
.chain([(r"/.*/src", "/[TMP]/src")])
.collect::>();
- uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###"
+ uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7141,7 +7552,7 @@ fn sync_derivation_chain_group() -> Result<()> {
hint: This usually indicates a problem with the package or the build environment.
help: `wsgiref` (v0.1.2) was included because `project:wsgi` (v0.1.0) depends on `wsgiref`
- "###);
+ "#);
Ok(())
}
@@ -7235,7 +7646,7 @@ fn sync_stale_egg_info() -> Result<()> {
}
);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7247,7 +7658,7 @@ fn sync_stale_egg_info() -> Result<()> {
+ member==0.1.dev5+gfea1041 (from git+https://github.com/astral-sh/uv-stale-egg-info-test.git@fea10416b9c479ac88fb217e14e40249b63bfbee#subdirectory=member)
+ root==0.1.dev5+gfea1041 (from git+https://github.com/astral-sh/uv-stale-egg-info-test.git@fea10416b9c479ac88fb217e14e40249b63bfbee)
+ setuptools==69.2.0
- "###);
+ ");
Ok(())
}
@@ -7330,7 +7741,7 @@ fn sync_git_repeated_member_static_metadata() -> Result<()> {
}
);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7341,7 +7752,7 @@ fn sync_git_repeated_member_static_metadata() -> Result<()> {
Installed 2 packages in [TIME]
+ uv-git-workspace-in-root==0.1.0 (from git+https://github.com/astral-sh/workspace-in-root-test.git@d3ab48d2338296d47e28dbb2fb327c5e2ac4ac68)
+ workspace-member-in-subdir==0.1.0 (from git+https://github.com/astral-sh/workspace-in-root-test.git@d3ab48d2338296d47e28dbb2fb327c5e2ac4ac68#subdirectory=workspace-member-in-subdir)
- "###);
+ ");
Ok(())
}
@@ -7446,7 +7857,7 @@ fn sync_git_repeated_member_dynamic_metadata() -> Result<()> {
}
);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7459,7 +7870,7 @@ fn sync_git_repeated_member_dynamic_metadata() -> Result<()> {
+ iniconfig==2.0.0
+ package==0.1.0 (from git+https://github.com/astral-sh/uv-dynamic-metadata-test.git@6c5aa0a65db737c9e7e2e60dc865bd8087012e64)
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -7542,7 +7953,7 @@ fn sync_git_repeated_member_backwards_path() -> Result<()> {
}
);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7553,7 +7964,7 @@ fn sync_git_repeated_member_backwards_path() -> Result<()> {
Installed 2 packages in [TIME]
+ dependency==0.1.0 (from git+https://github.com/astral-sh/uv-backwards-path-test@4bcc7fcd2e548c2ab7ba6b97b1c4e3ababccc7a9#subdirectory=dependency)
+ package==0.1.0 (from git+https://github.com/astral-sh/uv-backwards-path-test@4bcc7fcd2e548c2ab7ba6b97b1c4e3ababccc7a9#subdirectory=root)
- "###);
+ ");
Ok(())
}
@@ -7578,7 +7989,7 @@ fn mismatched_name_self_editable() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
@@ -7588,7 +7999,7 @@ fn mismatched_name_self_editable() -> Result<()> {
× Failed to build `foo @ file://[TEMP_DIR]/`
╰─▶ Package metadata name `project` does not match given name `foo`
help: `foo` was included because `project` (v0.1.0) depends on `foo`
- "###);
+ ");
Ok(())
}
@@ -7610,7 +8021,7 @@ fn mismatched_name_cached_wheel() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7620,7 +8031,7 @@ fn mismatched_name_cached_wheel() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0 (from https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz)
- "###);
+ ");
pyproject_toml.write_str(
r#"
@@ -7632,7 +8043,7 @@ fn mismatched_name_cached_wheel() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
@@ -7640,7 +8051,7 @@ fn mismatched_name_cached_wheel() -> Result<()> {
----- stderr -----
× Failed to download and build `foo @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz`
╰─▶ Package metadata name `iniconfig` does not match given name `foo`
- "###);
+ ");
Ok(())
}
@@ -7720,7 +8131,7 @@ fn sync_git_path_dependency() -> Result<()> {
}
);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7731,7 +8142,7 @@ fn sync_git_path_dependency() -> Result<()> {
Installed 2 packages in [TIME]
+ package1==0.1.0 (from git+https://github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package1)
+ package2==0.1.0 (from git+https://github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package2)
- "###);
+ ");
Ok(())
}
@@ -7835,7 +8246,7 @@ fn sync_build_tag() -> Result<()> {
"###);
// Install from the lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -7844,7 +8255,7 @@ fn sync_build_tag() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ build-tag==1.0.0
- "###);
+ ");
// Ensure that we choose the highest build tag (5).
uv_snapshot!(context.filters(), context.run().arg("--no-sync").arg("python").arg("-c").arg("import build_tag; build_tag.main()"), @r###"
@@ -7904,7 +8315,7 @@ fn url_hash_mismatch() -> Result<()> {
"#})?;
// Running `uv sync` should fail.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
@@ -7920,7 +8331,7 @@ fn url_hash_mismatch() -> Result<()> {
Computed:
sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3
help: `iniconfig` was included because `project` (v0.1.0) depends on `iniconfig`
- "###);
+ ");
Ok(())
}
@@ -7977,7 +8388,7 @@ fn path_hash_mismatch() -> Result<()> {
"#})?;
// Running `uv sync` should fail.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
@@ -7993,7 +8404,7 @@ fn path_hash_mismatch() -> Result<()> {
Computed:
sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3
help: `iniconfig` was included because `project` (v0.1.0) depends on `iniconfig`
- "###);
+ ");
Ok(())
}
@@ -8029,7 +8440,7 @@ fn find_links_relative_in_config_works_from_subdir() -> Result<()> {
subdir.create_dir_all()?;
// Run `uv sync --offline` from subdir. We expect it to find the local wheel in ../packages/.
- uv_snapshot!(context.filters(), context.sync().current_dir(&subdir).arg("--offline"), @r###"
+ uv_snapshot!(context.filters(), context.sync().current_dir(&subdir).arg("--offline"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8039,7 +8450,7 @@ fn find_links_relative_in_config_works_from_subdir() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ ok==1.0.0
- "###);
+ ");
Ok(())
}
@@ -8060,23 +8471,23 @@ fn sync_dry_run() -> Result<()> {
)?;
// Perform a `--dry-run`.
- uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
- Would create virtual environment at: .venv
+ Would create project environment at: .venv
Resolved 2 packages in [TIME]
Would create lockfile at: uv.lock
Would download 1 package
Would install 1 package
+ iniconfig==2.0.0
- "###);
+ ");
// Perform a full sync.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8088,7 +8499,7 @@ fn sync_dry_run() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Update the requirements.
pyproject_toml.write_str(
@@ -8101,13 +8512,13 @@ fn sync_dry_run() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
+ Would use project environment at: .venv
Resolved 2 packages in [TIME]
Would update lockfile at: uv.lock
Would download 1 package
@@ -8115,7 +8526,7 @@ fn sync_dry_run() -> Result<()> {
Would install 1 package
- iniconfig==2.0.0
+ typing-extensions==4.10.0
- "###);
+ ");
// Update the `requires-python`.
pyproject_toml.write_str(
@@ -8135,7 +8546,7 @@ fn sync_dry_run() -> Result<()> {
----- stderr -----
Using CPython 3.9.[X] interpreter at: [PYTHON-3.9]
- Would replace existing virtual environment at: .venv
+ Would replace project environment at: .venv
warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.12'` vs `python_full_version == '3.9.*'`
Resolved 2 packages in [TIME]
Would update lockfile at: uv.lock
@@ -8175,7 +8586,7 @@ fn sync_dry_run() -> Result<()> {
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
+ Would use project environment at: .venv
Resolved 2 packages in [TIME]
Found up-to-date lockfile at: uv.lock
Audited 1 package in [TIME]
@@ -8223,7 +8634,7 @@ fn sync_dry_run_and_locked() -> Result<()> {
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
+ Would use project environment at: .venv
Resolved 2 packages in [TIME]
Would download 1 package
Would install 1 package
@@ -8275,8 +8686,7 @@ fn sync_dry_run_and_frozen() -> Result<()> {
----- stdout -----
----- stderr -----
- Discovered existing environment at: .venv
- Found up-to-date lockfile at: uv.lock
+ Would use project environment at: .venv
Would download 3 packages
Would install 3 packages
+ anyio==3.7.0
@@ -8371,7 +8781,7 @@ fn sync_script() -> Result<()> {
"#
})?;
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8381,7 +8791,7 @@ fn sync_script() -> Result<()> {
Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME]
- iniconfig==2.0.0
- "###);
+ ");
// Modify the `requires-python`.
script.write_str(indoc! { r#"
@@ -8396,13 +8806,13 @@ fn sync_script() -> Result<()> {
"#
})?;
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Recreating script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
+ Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
Resolved 5 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 5 packages in [TIME]
@@ -8411,7 +8821,7 @@ fn sync_script() -> Result<()> {
+ idna==3.6
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
// `--locked` and `--frozen` should fail with helpful error messages.
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r"
@@ -8662,7 +9072,7 @@ fn sync_locked_script() -> Result<()> {
----- stdout -----
----- stderr -----
- Recreating script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
+ Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'`
Resolved 6 packages in [TIME]
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
@@ -8722,7 +9132,7 @@ fn sync_script_with_compatible_build_constraints() -> Result<()> {
)])
.collect::<Vec<_>>();
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8738,7 +9148,7 @@ fn sync_script_with_compatible_build_constraints() -> Result<()> {
+ requests==1.2.0
+ sniffio==1.3.1
+ typing-extensions==4.10.0
- "###);
+ ");
Ok(())
}
@@ -8774,7 +9184,7 @@ fn sync_script_with_incompatible_build_constraints() -> Result<()> {
"#
})?;
- uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###"
+ uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
success: false
exit_code: 1
----- stdout -----
@@ -8785,7 +9195,7 @@ fn sync_script_with_incompatible_build_constraints() -> Result<()> {
├─▶ Failed to resolve requirements from `setup.py` build
├─▶ No solution found when resolving: `setuptools>=40.8.0`
╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable.
- "###);
+ ");
Ok(())
}
@@ -8808,7 +9218,7 @@ fn unsupported_git_scheme() -> Result<()> {
"#},
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
@@ -8819,7 +9229,7 @@ fn unsupported_git_scheme() -> Result<()> {
× Failed to build `foo @ file://[TEMP_DIR]/`
├─▶ Failed to parse entry: `foo`
╰─▶ Unsupported Git URL scheme `c:` in `c:/home/ferris/projects/foo` (expected one of `https:`, `ssh:`, or `file:`)
- "###);
+ ");
Ok(())
}
@@ -8858,7 +9268,7 @@ fn multiple_group_conflicts() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8866,9 +9276,9 @@ fn multiple_group_conflicts() -> Result<()> {
----- stderr -----
Resolved 3 packages in [TIME]
Audited in [TIME]
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8878,9 +9288,9 @@ fn multiple_group_conflicts() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("baz"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8888,9 +9298,9 @@ fn multiple_group_conflicts() -> Result<()> {
----- stderr -----
Resolved 3 packages in [TIME]
Audited 1 package in [TIME]
- "###);
+ ");
- uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar").arg("--group").arg("baz"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar").arg("--group").arg("baz"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -8902,7 +9312,7 @@ fn multiple_group_conflicts() -> Result<()> {
Installed 1 package in [TIME]
- iniconfig==2.0.0
+ iniconfig==1.1.1
- "###);
+ ");
uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r"
success: false
@@ -9316,7 +9726,7 @@ fn prune_cache_url_subdirectory() -> Result<()> {
context.prune().arg("--ci").assert().success();
// Install the project.
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -9329,7 +9739,7 @@ fn prune_cache_url_subdirectory() -> Result<()> {
+ idna==3.6
+ root==0.0.1 (from https://github.com/user-attachments/files/18216295/subdirectory-test.tar.gz#subdirectory=packages/root)
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -9745,7 +10155,7 @@ fn sync_upload_time() -> Result<()> {
"#)?;
// Install from the lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -9756,17 +10166,17 @@ fn sync_upload_time() -> Result<()> {
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// Re-install from the lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 3 packages in [TIME]
- "###);
+ ");
Ok(())
}
@@ -9994,7 +10404,7 @@ fn read_only() -> Result<()> {
"#,
)?;
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
@@ -10004,7 +10414,7 @@ fn read_only() -> Result<()> {
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
assert!(context.temp_dir.child("uv.lock").exists());
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 0364703c2..13df63c19 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -1114,7 +1114,12 @@ uv sync [OPTIONS]
--only-group only-group Only include dependencies from the specified dependency group.
The project and its dependencies will be omitted.
May be provided multiple times. Implies --no-default-groups.
---package package Sync for a specific package in the workspace.
+--output-format output-format Select the output format
+[default: text]
Possible values:
+
+text: Display the result in a human-readable format
+json: Display the result in JSON format
+ --package package Sync for a specific package in the workspace.
The workspace's environment (.venv) is updated to reflect the subset of dependencies declared by the specified workspace member package.
If the workspace member does not exist, uv will exit with an error.
--prerelease prerelease The strategy to use when considering pre-release versions.
From 4d82e8886340821b435f36e6eccccf95654ca23a Mon Sep 17 00:00:00 2001
From: Ivan Smirnov
Date: Mon, 14 Jul 2025 16:35:34 +0100
Subject: [PATCH 028/130] Follow links when cache-key is a glob (#13438)
## Summary
Symlinks are handled inconsistently depending on whether `cache-key` is a glob or a file path. This PR addresses that.
- When cache-key is a path,
[`Path::metadata()`](https://doc.rust-lang.org/std/path/struct.Path.html#method.metadata)
is used to check if it's a file or not. According to the docs:
> This function will traverse symbolic links to query information about
the destination file.
So, if the target file is a symlink, it will be resolved and the
metadata will be queried for the underlying file.
- When cache-key is a glob, `globwalk` is used, specifically allowing
for symlinks:
```rust
.file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
```
- However, without enabling link following, `DirEntry::metadata()` will
return an equivalent of `Path::symlink_metadata()` (and not
`Path::metadata()`), which will have a file type that looks like
```rust
FileType {
is_file: false,
is_dir: false,
is_symlink: true,
..
}
```
- Then, the `metadata.is_file()` check fails, and uv complains that the target entry "is a directory when file was expected".
- TL;DR: glob cache-keys don't work with symlinks.
## Solutions
Option 1 (current PR): follow symlinks.
Option 2 (also viable): don't follow symlinks while globbing, but manually resolve any matched entry whose file type is a symlink. However, that would be somewhat surprising, since files would be resolved while directories would not. Symlinked directories are also genuinely useful, e.g., for linking in directories of local dependencies that live outside the project's path.
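For reference, here is a minimal standalone sketch of the glob-and-resolve-leaves flow, roughly mirroring the `cache_info.rs` change below (assuming the `globwalk` and `fs_err` crates; the helper name and signature are illustrative, not uv's actual API):
```rust
use std::path::{Path, PathBuf};

/// Collect files matched by a glob, resolving metadata through leaf symlinks
/// without following symlinked directories while globbing.
fn matched_files(base: &Path, pattern: &str) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
    let walker = globwalk::GlobWalkerBuilder::from_patterns(base, &[pattern])
        // Match regular files and symlinks; symlinked directories are not traversed.
        .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
        .build()?;
    let mut files = Vec::new();
    for entry in walker.into_iter().filter_map(Result::ok) {
        let metadata = if entry.path_is_symlink() {
            // Resolve the link target: `fs_err::metadata` traverses symlinks.
            fs_err::metadata(entry.path())?
        } else {
            entry.metadata()?
        };
        // Entries that resolve to directories (e.g., symlinked directories) are skipped.
        if metadata.is_file() {
            files.push(entry.path().to_path_buf());
        }
    }
    Ok(files)
}
```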
## Test Plan
This has been tested manually:
```rust
fn main() {
for follow_links in [false, true] {
let walker = globwalk::GlobWalkerBuilder::from_patterns(".", &["a/*"])
.file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
.follow_links(follow_links)
.build()
.unwrap();
let entry = walker.into_iter().next().unwrap().unwrap();
dbg!(&entry);
dbg!(entry.file_type());
dbg!(entry.path_is_symlink());
dbg!(entry.path());
let meta = entry.metadata().unwrap();
dbg!(meta.is_file());
}
let path = std::path::PathBuf::from("./a/b");
dbg!(path.metadata().unwrap().file_type());
dbg!(path.symlink_metadata().unwrap().file_type());
}
```
Current behaviour (glob cache-key, don't follow links):
```
[src/main.rs:9:9] &entry = DirEntry("./a/b")
[src/main.rs:10:9] entry.file_type() = FileType {
is_file: false,
is_dir: false,
is_symlink: true,
..
}
[src/main.rs:11:9] entry.path_is_symlink() = true
[src/main.rs:12:9] entry.path() = "./a/b"
[src/main.rs:14:9] meta.is_file() = false
```
Glob cache-key, follow links:
```
[src/main.rs:9:9] &entry = DirEntry("./a/b")
[src/main.rs:10:9] entry.file_type() = FileType {
is_file: true,
is_dir: false,
is_symlink: false,
..
}
[src/main.rs:11:9] entry.path_is_symlink() = true
[src/main.rs:12:9] entry.path() = "./a/b"
[src/main.rs:14:9] meta.is_file() = true
```
Using `path.metadata()` for a non-glob cache key:
```
[src/main.rs:18:5] path.metadata().unwrap().file_type() = FileType {
is_file: true,
is_dir: false,
is_symlink: false,
..
}
[src/main.rs:19:5] path.symlink_metadata().unwrap().file_type() = FileType {
is_file: false,
is_dir: false,
is_symlink: true,
..
}
```
---
Cargo.lock | 2 +
crates/uv-cache-info/Cargo.toml | 4 +
crates/uv-cache-info/src/cache_info.rs | 100 ++++++++++++++++++++++---
3 files changed, 97 insertions(+), 9 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index f2bebefc9..0069cbb65 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4907,10 +4907,12 @@ dependencies = [
name = "uv-cache-info"
version = "0.0.1"
dependencies = [
+ "anyhow",
"fs-err 3.1.1",
"globwalk",
"schemars",
"serde",
+ "tempfile",
"thiserror 2.0.12",
"toml",
"tracing",
diff --git a/crates/uv-cache-info/Cargo.toml b/crates/uv-cache-info/Cargo.toml
index 6b10bbebe..83df384be 100644
--- a/crates/uv-cache-info/Cargo.toml
+++ b/crates/uv-cache-info/Cargo.toml
@@ -24,3 +24,7 @@ thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
walkdir = { workspace = true }
+
+[dev-dependencies]
+anyhow = { workspace = true }
+tempfile = { workspace = true }
diff --git a/crates/uv-cache-info/src/cache_info.rs b/crates/uv-cache-info/src/cache_info.rs
index 27a98ab54..d2f836c84 100644
--- a/crates/uv-cache-info/src/cache_info.rs
+++ b/crates/uv-cache-info/src/cache_info.rs
@@ -230,18 +230,32 @@ impl CacheInfo {
continue;
}
};
- let metadata = match entry.metadata() {
- Ok(metadata) => metadata,
- Err(err) => {
- warn!("Failed to read metadata for glob entry: {err}");
- continue;
+ let metadata = if entry.path_is_symlink() {
+ // resolve symlinks for leaf entries without following symlinks while globbing
+ match fs_err::metadata(entry.path()) {
+ Ok(metadata) => metadata,
+ Err(err) => {
+ warn!("Failed to resolve symlink for glob entry: {err}");
+ continue;
+ }
+ }
+ } else {
+ match entry.metadata() {
+ Ok(metadata) => metadata,
+ Err(err) => {
+ warn!("Failed to read metadata for glob entry: {err}");
+ continue;
+ }
}
};
if !metadata.is_file() {
- warn!(
- "Expected file for cache key, but found directory: `{}`",
- entry.path().display()
- );
+ if !entry.path_is_symlink() {
+ // don't warn if it was a symlink - it may legitimately resolve to a directory
+ warn!(
+ "Expected file for cache key, but found directory: `{}`",
+ entry.path().display()
+ );
+ }
continue;
}
timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
@@ -346,3 +360,71 @@ enum DirectoryTimestamp {
Timestamp(Timestamp),
Inode(u64),
}
+
+#[cfg(all(test, unix))]
+mod tests_unix {
+ use anyhow::Result;
+
+ use super::{CacheInfo, Timestamp};
+
+ #[test]
+ fn test_cache_info_symlink_resolve() -> Result<()> {
+ let dir = tempfile::tempdir()?;
+ let dir = dir.path().join("dir");
+ fs_err::create_dir_all(&dir)?;
+
+ let write_manifest = |cache_key: &str| {
+ fs_err::write(
+ dir.join("pyproject.toml"),
+ format!(
+ r#"
+ [tool.uv]
+ cache-keys = [
+ "{cache_key}"
+ ]
+ "#
+ ),
+ )
+ };
+
+ let touch = |path: &str| -> Result<_> {
+ let path = dir.join(path);
+ fs_err::create_dir_all(path.parent().unwrap())?;
+ fs_err::write(&path, "")?;
+ Ok(Timestamp::from_metadata(&path.metadata()?))
+ };
+
+ let cache_timestamp = || -> Result<_> { Ok(CacheInfo::from_directory(&dir)?.timestamp) };
+
+ write_manifest("x/**")?;
+ assert_eq!(cache_timestamp()?, None);
+ let y = touch("x/y")?;
+ assert_eq!(cache_timestamp()?, Some(y));
+ let z = touch("x/z")?;
+ assert_eq!(cache_timestamp()?, Some(z));
+
+ // leaf entry symlink should be resolved
+ let a = touch("../a")?;
+ fs_err::os::unix::fs::symlink(dir.join("../a"), dir.join("x/a"))?;
+ assert_eq!(cache_timestamp()?, Some(a));
+
+ // symlink directories should not be followed while globbing
+ let c = touch("../b/c")?;
+ fs_err::os::unix::fs::symlink(dir.join("../b"), dir.join("x/b"))?;
+ assert_eq!(cache_timestamp()?, Some(a));
+
+ // no globs, should work as expected
+ write_manifest("x/y")?;
+ assert_eq!(cache_timestamp()?, Some(y));
+ write_manifest("x/a")?;
+ assert_eq!(cache_timestamp()?, Some(a));
+ write_manifest("x/b/c")?;
+ assert_eq!(cache_timestamp()?, Some(c));
+
+ // symlink pointing to a directory
+ write_manifest("x/*b*")?;
+ assert_eq!(cache_timestamp()?, None);
+
+ Ok(())
+ }
+}
From 77c771c7f33df1040c854aeea462c18b74d39987 Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Mon, 14 Jul 2025 14:01:28 -0400
Subject: [PATCH 029/130] Bump version to 0.7.21 (#14611)
---
CHANGELOG.md | 35 +++++++++++++++++++++++++++
Cargo.lock | 6 ++---
crates/uv-build/Cargo.toml | 2 +-
crates/uv-build/pyproject.toml | 2 +-
crates/uv-version/Cargo.toml | 2 +-
crates/uv/Cargo.toml | 2 +-
docs/concepts/build-backend.md | 2 +-
docs/getting-started/installation.md | 4 +--
docs/guides/integration/aws-lambda.md | 4 +--
docs/guides/integration/docker.md | 10 ++++----
docs/guides/integration/github.md | 2 +-
docs/guides/integration/pre-commit.md | 10 ++++----
pyproject.toml | 2 +-
13 files changed, 59 insertions(+), 24 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9fa93fb03..0c7d62c75 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,41 @@
+## 0.7.21
+
+### Python
+
+- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux
+
+See the
+[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712)
+for more details.
+
+### Enhancements
+
+- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
+- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
+- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
+- Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582))
+- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
+- Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544))
+
+### Preview features
+
+- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))
+
+### Bug fixes
+
+- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606))
+- Fix handling of globs in `cache-key`: follow symlinks ([#13438](https://github.com/astral-sh/uv/pull/13438)) and `..` and improve performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
+
+### Documentation
+
+- Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539))
+- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
+- Update CONTRIBUTING.md with instructions to format markdown files ([#14246](https://github.com/astral-sh/uv/pull/14246))
+- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))
+
## 0.7.20
### Python
diff --git a/Cargo.lock b/Cargo.lock
index 0069cbb65..c43f4872d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4633,7 +4633,7 @@ dependencies = [
[[package]]
name = "uv"
-version = "0.7.20"
+version = "0.7.21"
dependencies = [
"anstream",
"anyhow",
@@ -4798,7 +4798,7 @@ dependencies = [
[[package]]
name = "uv-build"
-version = "0.7.20"
+version = "0.7.21"
dependencies = [
"anyhow",
"uv-build-backend",
@@ -5991,7 +5991,7 @@ dependencies = [
[[package]]
name = "uv-version"
-version = "0.7.20"
+version = "0.7.21"
[[package]]
name = "uv-virtualenv"
diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml
index ffbea0ea9..f943010ae 100644
--- a/crates/uv-build/Cargo.toml
+++ b/crates/uv-build/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-build"
-version = "0.7.20"
+version = "0.7.21"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml
index 13c21edd8..5a2209155 100644
--- a/crates/uv-build/pyproject.toml
+++ b/crates/uv-build/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "uv-build"
-version = "0.7.20"
+version = "0.7.21"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml
index f1b47dd1d..a9fe788a5 100644
--- a/crates/uv-version/Cargo.toml
+++ b/crates/uv-version/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-version"
-version = "0.7.20"
+version = "0.7.21"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index 904cc8fc3..fe2f2200c 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv"
-version = "0.7.20"
+version = "0.7.21"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md
index a34bc7658..69694f317 100644
--- a/docs/concepts/build-backend.md
+++ b/docs/concepts/build-backend.md
@@ -36,7 +36,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the
```toml title="pyproject.toml"
[build-system]
-requires = ["uv_build>=0.7.20,<0.8.0"]
+requires = ["uv_build>=0.7.21,<0.8.0"]
build-backend = "uv_build"
```
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 12895b56e..fa68d210a 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```console
- $ curl -LsSf https://astral.sh/uv/0.7.20/install.sh | sh
+ $ curl -LsSf https://astral.sh/uv/0.7.21/install.sh | sh
```
=== "Windows"
@@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```pwsh-session
- PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.20/install.ps1 | iex"
+ PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.21/install.ps1 | iex"
```
!!! tip
diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md
index 1e6c7c47a..4cdb75b7a 100644
--- a/docs/guides/integration/aws-lambda.md
+++ b/docs/guides/integration/aws-lambda.md
@@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th
other unnecessary files.
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.20 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.21 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
@@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell
Finally, we'll update the Dockerfile to include the local library in the deployment package:
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.20 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.21 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md
index 0445b155c..bbea9b264 100644
--- a/docs/guides/integration/docker.md
+++ b/docs/guides/integration/docker.md
@@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help
The following distroless images are available:
- `ghcr.io/astral-sh/uv:latest`
-- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.20`
+- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.21`
- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
version)
@@ -75,7 +75,7 @@ And the following derived images are available:
As with the distroless image, each derived image is published with uv version tags as
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
-`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.20-alpine`.
+`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.21-alpine`.
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
page.
@@ -113,7 +113,7 @@ Note this requires `curl` to be available.
In either case, it is best practice to pin to a specific uv version, e.g., with:
```dockerfile
-COPY --from=ghcr.io/astral-sh/uv:0.7.20 /uv /uvx /bin/
+COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/
```
!!! tip
@@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.20 /uv /uvx /bin/
Or, with the installer:
```dockerfile
-ADD https://astral.sh/uv/0.7.20/install.sh /uv-installer.sh
+ADD https://astral.sh/uv/0.7.21/install.sh /uv-installer.sh
```
### Installing a project
@@ -557,5 +557,5 @@ Verified OK
!!! tip
These examples use `latest`, but best practice is to verify the attestation for a specific
- version tag, e.g., `ghcr.io/astral-sh/uv:0.7.20`, or (even better) the specific image digest,
+ version tag, e.g., `ghcr.io/astral-sh/uv:0.7.21`, or (even better) the specific image digest,
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md
index e7fea7b29..d206febd1 100644
--- a/docs/guides/integration/github.md
+++ b/docs/guides/integration/github.md
@@ -47,7 +47,7 @@ jobs:
uses: astral-sh/setup-uv@v6
with:
# Install a specific version of uv.
- version: "0.7.20"
+ version: "0.7.21"
```
## Setting up Python
diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md
index 0495581c2..912ff0213 100644
--- a/docs/guides/integration/pre-commit.md
+++ b/docs/guides/integration/pre-commit.md
@@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.20
+ rev: 0.7.21
hooks:
- id: uv-lock
```
@@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.20
+ rev: 0.7.21
hooks:
- id: uv-export
```
@@ -41,7 +41,7 @@ To compile requirements files:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.20
+ rev: 0.7.21
hooks:
# Compile requirements
- id: pip-compile
@@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.20
+ rev: 0.7.21
hooks:
# Compile requirements
- id: pip-compile
@@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.20
+ rev: 0.7.21
hooks:
# Compile requirements
- id: pip-compile
diff --git a/pyproject.toml b/pyproject.toml
index df118d720..f3c9c4f64 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "uv"
-version = "0.7.20"
+version = "0.7.21"
description = "An extremely fast Python package and project manager, written in Rust."
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
From b046e7f3be3b495f695a474ce0088f9b2280eb5c Mon Sep 17 00:00:00 2001
From: InSync
Date: Tue, 15 Jul 2025 02:06:05 +0700
Subject: [PATCH 030/130] Add missing comma in `projects/dependencies.md`
(#14613)
## Summary
Diff:
```diff
[dependency-groups]
dev = [
- {include-group = "lint"}
+ {include-group = "lint"},
{include-group = "test"}
]
```
## Test Plan
None.
---
docs/concepts/projects/dependencies.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md
index e5c64a3ee..2eabbf4dc 100644
--- a/docs/concepts/projects/dependencies.md
+++ b/docs/concepts/projects/dependencies.md
@@ -686,7 +686,7 @@ A dependency group can include other dependency groups, e.g.:
```toml title="pyproject.toml"
[dependency-groups]
dev = [
- {include-group = "lint"}
+ {include-group = "lint"},
{include-group = "test"}
]
lint = [
From 9871bbdc7931c5ca664a798fec7df2da9b71e1ba Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Mon, 14 Jul 2025 16:29:02 -0400
Subject: [PATCH 031/130] Fix 0.7.21 changelog (#14615)
---
CHANGELOG.md | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0c7d62c75..38be00d2d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,6 +21,9 @@ for more details.
- Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582))
- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
- Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544))
+- Follow leaf symlinks matched by globs in `cache-key` ([#13438](https://github.com/astral-sh/uv/pull/13438))
+- Support parent path components (`..`) in globs in `cache-key` ([#13469](https://github.com/astral-sh/uv/pull/13469))
+- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
### Preview features
@@ -29,13 +32,12 @@ for more details.
### Bug fixes
- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606))
-- Fix handling of globs in `cache-key`: follow symlinks ([#13438](https://github.com/astral-sh/uv/pull/13438)) and `..` and improve performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
### Documentation
- Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539))
- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
-- Update CONTRIBUTING.md with instructions to format markdown files ([#14246](https://github.com/astral-sh/uv/pull/14246))
+- Update CONTRIBUTING.md with instructions to format Markdown files via Docker ([#14246](https://github.com/astral-sh/uv/pull/14246))
- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))
## 0.7.20
From 405ef66cef6cb67817d039277f05c924a5cff19e Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Tue, 15 Jul 2025 10:00:04 -0400
Subject: [PATCH 032/130] Allow users to override index `cache-control` headers
(#14620)
## Summary
You can now override the cache control headers for the Simple API, file
downloads, or both:
```toml
[[tool.uv.index]]
name = "example"
url = "https://example.com/simple"
cache-control = { api = "max-age=600", files = "max-age=365000000, immutable" }
```
Closes https://github.com/astral-sh/uv/issues/10444.
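Under the hood, the override is applied by injecting the configured value as the request's `cache-control` header (see the `CacheControl::Override` branch in `cached_client.rs` below). Here is a minimal standalone sketch of that mechanic using the `http` crate directly; the enum and helper are illustrative stand-ins, not uv's exact types:
```rust
use http::{HeaderValue, Request, header::CACHE_CONTROL};

/// Illustrative stand-in for the per-index cache-control setting.
enum CachePolicy<'a> {
    /// Respect the server-provided `cache-control` header.
    Respect,
    /// Replace it with a user-configured value from `[[tool.uv.index]]`.
    Override(&'a str),
}

/// Apply the policy to an outgoing request.
fn apply(
    req: &mut Request<()>,
    policy: &CachePolicy<'_>,
) -> Result<(), http::header::InvalidHeaderValue> {
    if let CachePolicy::Override(value) = policy {
        req.headers_mut()
            .insert(CACHE_CONTROL, HeaderValue::from_str(value)?);
    }
    Ok(())
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut req = Request::builder()
        .uri("https://example.com/simple/")
        .body(())?;

    // A `Respect` policy leaves the request untouched.
    apply(&mut req, &CachePolicy::Respect)?;
    assert!(req.headers().get(CACHE_CONTROL).is_none());

    // An `Override` policy injects the configured header.
    apply(&mut req, &CachePolicy::Override("max-age=600"))?;
    assert_eq!(
        req.headers().get(CACHE_CONTROL),
        Some(&HeaderValue::from_static("max-age=600"))
    );
    Ok(())
}
```
In uv's implementation, the `api` value applies to Simple API (metadata) requests and the `files` value to artifact downloads.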
---
crates/uv-client/src/cached_client.rs | 25 ++++--
crates/uv-client/src/error.rs | 3 +
crates/uv-client/src/registry_client.rs | 58 +++++++++----
crates/uv-distribution-types/src/index.rs | 83 +++++++++++++++++++
crates/uv-distribution-types/src/index_url.rs | 80 ++++++++++++++++++
crates/uv/tests/it/show_settings.rs | 29 +++++++
docs/concepts/indexes.md | 37 +++++++++
uv.schema.json | 32 +++++++
8 files changed, 323 insertions(+), 24 deletions(-)
diff --git a/crates/uv-client/src/cached_client.rs b/crates/uv-client/src/cached_client.rs
index ee3314d1c..f888ea5f1 100644
--- a/crates/uv-client/src/cached_client.rs
+++ b/crates/uv-client/src/cached_client.rs
@@ -196,16 +196,18 @@ impl + std::error::Error + 'static> From> for
}
#[derive(Debug, Clone, Copy)]
-pub enum CacheControl {
+pub enum CacheControl<'a> {
/// Respect the `cache-control` header from the response.
None,
/// Apply `max-age=0, must-revalidate` to the request.
MustRevalidate,
/// Allow the client to return stale responses.
AllowStale,
+ /// Override the cache control header with a custom value.
+ Override(&'a str),
}
-impl From<Freshness> for CacheControl {
+impl From<Freshness> for CacheControl<'_> {
fn from(value: Freshness) -> Self {
match value {
Freshness::Fresh => Self::None,
@@ -259,7 +261,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result> {
let payload = self
@@ -292,7 +294,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result> {
let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
@@ -469,7 +471,7 @@ impl CachedClient {
async fn send_cached(
&self,
mut req: Request,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
cached: DataWithCachePolicy,
) -> Result {
// Apply the cache control header, if necessary.
@@ -481,6 +483,13 @@ impl CachedClient {
http::HeaderValue::from_static("no-cache"),
);
}
+ CacheControl::Override(value) => {
+ req.headers_mut().insert(
+ http::header::CACHE_CONTROL,
+ http::HeaderValue::from_str(value)
+ .map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?,
+ );
+ }
}
Ok(match cached.cache_policy.before_request(&mut req) {
BeforeRequest::Fresh => {
@@ -488,7 +497,7 @@ impl CachedClient {
CachedResponse::FreshCache(cached)
}
BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
- CacheControl::None | CacheControl::MustRevalidate => {
+ CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
debug!("Found stale response for: {}", req.url());
self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
.await?
@@ -599,7 +608,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result> {
let payload = self
@@ -623,7 +632,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result> {
let mut past_retries = 0;
diff --git a/crates/uv-client/src/error.rs b/crates/uv-client/src/error.rs
index 754237fe2..035cdea71 100644
--- a/crates/uv-client/src/error.rs
+++ b/crates/uv-client/src/error.rs
@@ -259,6 +259,9 @@ pub enum ErrorKind {
"Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`"
)]
Offline(String),
+
+ #[error("Invalid cache control header: `{0}`")]
+ InvalidCacheControl(String),
}
impl ErrorKind {
diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs
index afa1b03ae..1d12c5adf 100644
--- a/crates/uv-client/src/registry_client.rs
+++ b/crates/uv-client/src/registry_client.rs
@@ -511,11 +511,17 @@ impl RegistryClient {
format!("{package_name}.rkyv"),
);
let cache_control = match self.connectivity {
- Connectivity::Online => CacheControl::from(
- self.cache
- .freshness(&cache_entry, Some(package_name), None)
- .map_err(ErrorKind::Io)?,
- ),
+ Connectivity::Online => {
+ if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
+ CacheControl::Override(header)
+ } else {
+ CacheControl::from(
+ self.cache
+ .freshness(&cache_entry, Some(package_name), None)
+ .map_err(ErrorKind::Io)?,
+ )
+ }
+ }
Connectivity::Offline => CacheControl::AllowStale,
};
@@ -571,7 +577,7 @@ impl RegistryClient {
package_name: &PackageName,
url: &DisplaySafeUrl,
cache_entry: &CacheEntry,
- cache_control: CacheControl,
+ cache_control: CacheControl<'_>,
) -> Result, Error> {
let simple_request = self
.uncached_client(url)
@@ -783,11 +789,17 @@ impl RegistryClient {
format!("{}.msgpack", filename.cache_key()),
);
let cache_control = match self.connectivity {
- Connectivity::Online => CacheControl::from(
- self.cache
- .freshness(&cache_entry, Some(&filename.name), None)
- .map_err(ErrorKind::Io)?,
- ),
+ Connectivity::Online => {
+ if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
+ CacheControl::Override(header)
+ } else {
+ CacheControl::from(
+ self.cache
+ .freshness(&cache_entry, Some(&filename.name), None)
+ .map_err(ErrorKind::Io)?,
+ )
+ }
+ }
Connectivity::Offline => CacheControl::AllowStale,
};
@@ -853,11 +865,25 @@ impl RegistryClient {
format!("{}.msgpack", filename.cache_key()),
);
let cache_control = match self.connectivity {
- Connectivity::Online => CacheControl::from(
- self.cache
- .freshness(&cache_entry, Some(&filename.name), None)
- .map_err(ErrorKind::Io)?,
- ),
+ Connectivity::Online => {
+ if let Some(index) = index {
+ if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
+ CacheControl::Override(header)
+ } else {
+ CacheControl::from(
+ self.cache
+ .freshness(&cache_entry, Some(&filename.name), None)
+ .map_err(ErrorKind::Io)?,
+ )
+ }
+ } else {
+ CacheControl::from(
+ self.cache
+ .freshness(&cache_entry, Some(&filename.name), None)
+ .map_err(ErrorKind::Io)?,
+ )
+ }
+ }
Connectivity::Offline => CacheControl::AllowStale,
};
diff --git a/crates/uv-distribution-types/src/index.rs b/crates/uv-distribution-types/src/index.rs
index 8ac7c3cd4..04614a18e 100644
--- a/crates/uv-distribution-types/src/index.rs
+++ b/crates/uv-distribution-types/src/index.rs
@@ -6,11 +6,23 @@ use thiserror::Error;
use uv_auth::{AuthPolicy, Credentials};
use uv_redacted::DisplaySafeUrl;
+use uv_small_str::SmallString;
use crate::index_name::{IndexName, IndexNameError};
use crate::origin::Origin;
use crate::{IndexStatusCodeStrategy, IndexUrl, IndexUrlError, SerializableStatusCode};
+/// Cache control configuration for an index.
+#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Default)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[serde(rename_all = "kebab-case")]
+pub struct IndexCacheControl {
+ /// Cache control header for Simple API requests.
+ pub api: Option<SmallString>,
+ /// Cache control header for file downloads.
+ pub files: Option<SmallString>,
+}
+
#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "kebab-case")]
@@ -104,6 +116,19 @@ pub struct Index {
/// ```
#[serde(default)]
pub ignore_error_codes: Option<Vec<SerializableStatusCode>>,
+ /// Cache control configuration for this index.
+ ///
+ /// When set, these headers will override the server's cache control headers
+ /// for both package metadata requests and artifact downloads.
+ ///
+ /// ```toml
+ /// [[tool.uv.index]]
+ /// name = "my-index"
+ /// url = "https:///simple"
+ /// cache-control = { api = "max-age=600", files = "max-age=3600" }
+ /// ```
+ #[serde(default)]
+ pub cache_control: Option<IndexCacheControl>,
}
#[derive(
@@ -142,6 +167,7 @@ impl Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
}
}
@@ -157,6 +183,7 @@ impl Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
}
}
@@ -172,6 +199,7 @@ impl Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
}
}
@@ -250,6 +278,7 @@ impl From for Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
}
}
}
@@ -273,6 +302,7 @@ impl FromStr for Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
});
}
}
@@ -289,6 +319,7 @@ impl FromStr for Index {
publish_url: None,
authenticate: AuthPolicy::default(),
ignore_error_codes: None,
+ cache_control: None,
})
}
}
@@ -384,3 +415,55 @@ pub enum IndexSourceError {
#[error("Index included a name, but the name was empty")]
EmptyName,
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_index_cache_control_headers() {
+ // Test that cache control headers are properly parsed from TOML
+ let toml_str = r#"
+ name = "test-index"
+ url = "https://test.example.com/simple"
+ cache-control = { api = "max-age=600", files = "max-age=3600" }
+ "#;
+
+ let index: Index = toml::from_str(toml_str).unwrap();
+ assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+ assert!(index.cache_control.is_some());
+ let cache_control = index.cache_control.as_ref().unwrap();
+ assert_eq!(cache_control.api.as_deref(), Some("max-age=600"));
+ assert_eq!(cache_control.files.as_deref(), Some("max-age=3600"));
+ }
+
+ #[test]
+ fn test_index_without_cache_control() {
+ // Test that indexes work without cache control headers
+ let toml_str = r#"
+ name = "test-index"
+ url = "https://test.example.com/simple"
+ "#;
+
+ let index: Index = toml::from_str(toml_str).unwrap();
+ assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+ assert_eq!(index.cache_control, None);
+ }
+
+ #[test]
+ fn test_index_partial_cache_control() {
+ // Test that cache control can have just one field
+ let toml_str = r#"
+ name = "test-index"
+ url = "https://test.example.com/simple"
+ cache-control = { api = "max-age=300" }
+ "#;
+
+ let index: Index = toml::from_str(toml_str).unwrap();
+ assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+ assert!(index.cache_control.is_some());
+ let cache_control = index.cache_control.as_ref().unwrap();
+ assert_eq!(cache_control.api.as_deref(), Some("max-age=300"));
+ assert_eq!(cache_control.files, None);
+ }
+}
diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs
index 1c8cd0a76..bd3e9abc2 100644
--- a/crates/uv-distribution-types/src/index_url.rs
+++ b/crates/uv-distribution-types/src/index_url.rs
@@ -599,6 +599,26 @@ impl<'a> IndexUrls {
}
IndexStatusCodeStrategy::Default
}
+
+ /// Return the Simple API cache control header for an [`IndexUrl`], if configured.
+ pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+ for index in &self.indexes {
+ if index.url() == url {
+ return index.cache_control.as_ref()?.api.as_deref();
+ }
+ }
+ None
+ }
+
+ /// Return the artifact cache control header for an [`IndexUrl`], if configured.
+ pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+ for index in &self.indexes {
+ if index.url() == url {
+ return index.cache_control.as_ref()?.files.as_deref();
+ }
+ }
+ None
+ }
}
bitflags::bitflags! {
@@ -717,4 +737,64 @@ mod tests {
"git+https://github.com/example/repo.git"
));
}
+
+ #[test]
+ fn test_cache_control_lookup() {
+ use std::str::FromStr;
+
+ use uv_small_str::SmallString;
+
+ use crate::IndexFormat;
+ use crate::index_name::IndexName;
+
+ let indexes = vec![
+ Index {
+ name: Some(IndexName::from_str("index1").unwrap()),
+ url: IndexUrl::from_str("https://index1.example.com/simple").unwrap(),
+ cache_control: Some(crate::IndexCacheControl {
+ api: Some(SmallString::from("max-age=300")),
+ files: Some(SmallString::from("max-age=1800")),
+ }),
+ explicit: false,
+ default: false,
+ origin: None,
+ format: IndexFormat::Simple,
+ publish_url: None,
+ authenticate: uv_auth::AuthPolicy::default(),
+ ignore_error_codes: None,
+ },
+ Index {
+ name: Some(IndexName::from_str("index2").unwrap()),
+ url: IndexUrl::from_str("https://index2.example.com/simple").unwrap(),
+ cache_control: None,
+ explicit: false,
+ default: false,
+ origin: None,
+ format: IndexFormat::Simple,
+ publish_url: None,
+ authenticate: uv_auth::AuthPolicy::default(),
+ ignore_error_codes: None,
+ },
+ ];
+
+ let index_urls = IndexUrls::from_indexes(indexes);
+
+ let url1 = IndexUrl::from_str("https://index1.example.com/simple").unwrap();
+ assert_eq!(
+ index_urls.simple_api_cache_control_for(&url1),
+ Some("max-age=300")
+ );
+ assert_eq!(
+ index_urls.artifact_cache_control_for(&url1),
+ Some("max-age=1800")
+ );
+
+ let url2 = IndexUrl::from_str("https://index2.example.com/simple").unwrap();
+ assert_eq!(index_urls.simple_api_cache_control_for(&url2), None);
+ assert_eq!(index_urls.artifact_cache_control_for(&url2), None);
+
+ let url3 = IndexUrl::from_str("https://index3.example.com/simple").unwrap();
+ assert_eq!(index_urls.simple_api_cache_control_for(&url3), None);
+ assert_eq!(index_urls.artifact_cache_control_for(&url3), None);
+ }
}
diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs
index 7635bd523..2637af8ac 100644
--- a/crates/uv/tests/it/show_settings.rs
+++ b/crates/uv/tests/it/show_settings.rs
@@ -139,6 +139,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -320,6 +321,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -502,6 +504,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -716,6 +719,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -1059,6 +1063,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -1267,6 +1272,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -1299,6 +1305,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -1484,6 +1491,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -1516,6 +1524,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -1548,6 +1557,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -1755,6 +1765,7 @@ fn resolve_find_links() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
no_index: true,
@@ -2124,6 +2135,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -2156,6 +2168,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -2337,6 +2350,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -2369,6 +2383,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -3564,6 +3579,7 @@ fn resolve_both() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -3870,6 +3886,7 @@ fn resolve_config_file() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -4658,6 +4675,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -4690,6 +4708,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -4873,6 +4892,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -4905,6 +4925,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -5094,6 +5115,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -5126,6 +5148,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -5310,6 +5333,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -5342,6 +5366,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -5533,6 +5558,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -5565,6 +5591,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
@@ -5749,6 +5776,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
Index {
name: None,
@@ -5781,6 +5809,7 @@ fn index_priority() -> anyhow::Result<()> {
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
+ cache_control: None,
},
],
flat_index: [],
diff --git a/docs/concepts/indexes.md b/docs/concepts/indexes.md
index 6c03bae66..5e6c3866c 100644
--- a/docs/concepts/indexes.md
+++ b/docs/concepts/indexes.md
@@ -244,6 +244,43 @@ authenticate = "never"
When `authenticate` is set to `never`, uv will never search for credentials for the given index and
will error if credentials are provided directly.
+### Customizing cache control headers
+
+By default, uv will respect the cache control headers provided by the index. For example, PyPI
+serves package metadata with a `max-age=600` header, thereby allowing uv to cache package metadata
+for 10 minutes; and wheels and source distributions with a `max-age=365000000, immutable` header,
+thereby allowing uv to cache artifacts indefinitely.
+
+To override the cache control headers for an index, use the `cache-control` setting:
+
+```toml
+[[tool.uv.index]]
+name = "example"
+url = "https://example.com/simple"
+cache-control = { api = "max-age=600", files = "max-age=365000000, immutable" }
+```
+
+The `cache-control` setting accepts an object with two optional keys:
+
+- `api`: Controls caching for Simple API requests (package metadata).
+- `files`: Controls caching for artifact downloads (wheels and source distributions).
+
+The values for these keys are strings that follow the
+[HTTP Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control)
+syntax. For example, to force uv to always revalidate package metadata, set `api = "no-cache"`:
+
+```toml
+[[tool.uv.index]]
+name = "example"
+url = "https://example.com/simple"
+cache-control = { api = "no-cache" }
+```
+
+This setting is most commonly used to override the default cache control headers for private indexes
+that otherwise disable caching, often unintentionally. We typically recommend following PyPI's
+approach to caching headers, i.e., setting `api = "max-age=600"` and
+`files = "max-age=365000000, immutable"`.
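To make the override concrete, here is a rough, self-contained sketch of what applying a configured value over a server-provided `Cache-Control` header amounts to. This is not uv's code; the `http` crate and the helper name are assumptions for illustration only.

```
use http::{HeaderMap, HeaderValue, header};

// Replace the server's `Cache-Control` header with the configured override,
// if any. Invalid override strings are ignored here for brevity.
fn apply_cache_control_override(headers: &mut HeaderMap, overriding: Option<&str>) {
    if let Some(value) = overriding {
        if let Ok(value) = HeaderValue::from_str(value) {
            headers.insert(header::CACHE_CONTROL, value);
        }
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    // What a misconfigured private index might send for its Simple API:
    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("no-store"));
    // The `api` value from `cache-control = { api = "max-age=600" }`:
    apply_cache_control_override(&mut headers, Some("max-age=600"));
    assert_eq!(
        headers.get(header::CACHE_CONTROL).unwrap().to_str().unwrap(),
        "max-age=600"
    );
}
```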
+
## "Flat" indexes
By default, `[[tool.uv.index]]` entries are assumed to be PyPI-style registries that implement the
diff --git a/uv.schema.json b/uv.schema.json
index 4190672e9..e418f37f0 100644
--- a/uv.schema.json
+++ b/uv.schema.json
@@ -907,6 +907,18 @@
],
"default": "auto"
},
+ "cache-control": {
+ "description": "Cache control configuration for this index.\n\nWhen set, these headers will override the server's cache control headers\nfor both package metadata requests and artifact downloads.\n\n```toml\n[[tool.uv.index]]\nname = \"my-index\"\nurl = \"https:///simple\"\ncache-control = { api = \"max-age=600\", files = \"max-age=3600\" }\n```",
+ "anyOf": [
+ {
+ "$ref": "#/definitions/IndexCacheControl"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null
+ },
"default": {
"description": "Mark the index as the default index.\n\nBy default, uv uses PyPI as the default index, such that even if additional indexes are\ndefined via `[[tool.uv.index]]`, PyPI will still be used as a fallback for packages that\naren't found elsewhere. To disable the PyPI default, set `default = true` on at least one\nother index.\n\nMarking an index as default will move it to the front of the list of indexes, such that it\nis given the highest priority when resolving packages.",
"type": "boolean",
@@ -972,6 +984,26 @@
"url"
]
},
+ "IndexCacheControl": {
+ "description": "Cache control configuration for an index.",
+ "type": "object",
+ "properties": {
+ "api": {
+ "description": "Cache control header for Simple API requests.",
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "files": {
+ "description": "Cache control header for file downloads.",
+ "type": [
+ "string",
+ "null"
+ ]
+ }
+ }
+ },
"IndexFormat": {
"oneOf": [
{
From cd0d5d4748af11007e05718005437c0780d09048 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Preng=C3=A8re?=
<2138730+alexprengere@users.noreply.github.com>
Date: Tue, 15 Jul 2025 19:03:01 +0200
Subject: [PATCH 033/130] Fix --all-arches when paired with --only-downloads
(#14629)
## Summary
On current main, and on the latest released version 0.7.21, I have:
```
$ uv python list --only-downloads --all-arches
cpython-3.14.0b4-linux-x86_64-gnu
cpython-3.14.0b4+freethreaded-linux-x86_64-gnu
cpython-3.13.5-linux-x86_64-gnu
cpython-3.13.5+freethreaded-linux-x86_64-gnu
cpython-3.12.11-linux-x86_64-gnu
cpython-3.11.13-linux-x86_64-gnu
cpython-3.10.18-linux-x86_64-gnu
cpython-3.9.23-linux-x86_64-gnu
cpython-3.8.20-linux-x86_64-gnu
pypy-3.11.13-linux-x86_64-gnu
pypy-3.10.16-linux-x86_64-gnu
pypy-3.9.19-linux-x86_64-gnu
pypy-3.8.16-linux-x86_64-gnu
graalpy-3.11.0-linux-x86_64-gnu
graalpy-3.10.0-linux-x86_64-gnu
graalpy-3.8.5-linux-x86_64-gnu
```
As you can see, `--all-arches` is not respected here.
## Test Plan
With the patch:
```
$ cargo run python list --only-downloads --all-arches
cpython-3.14.0b4-linux-x86_64-gnu
cpython-3.14.0b4+freethreaded-linux-x86_64-gnu
cpython-3.14.0b4-linux-x86_64_v2-gnu
cpython-3.14.0b4+freethreaded-linux-x86_64_v2-gnu
cpython-3.14.0b4-linux-x86_64_v3-gnu
cpython-3.14.0b4+freethreaded-linux-x86_64_v3-gnu
cpython-3.14.0b4-linux-x86_64_v4-gnu
cpython-3.14.0b4+freethreaded-linux-x86_64_v4-gnu
cpython-3.14.0b4-linux-aarch64-gnu
cpython-3.14.0b4+freethreaded-linux-aarch64-gnu
cpython-3.14.0b4-linux-powerpc64le-gnu
cpython-3.14.0b4+freethreaded-linux-powerpc64le-gnu
cpython-3.14.0b4-linux-riscv64gc-gnu
cpython-3.14.0b4+freethreaded-linux-riscv64gc-gnu
cpython-3.14.0b4-linux-s390x-gnu
cpython-3.14.0b4+freethreaded-linux-s390x-gnu
cpython-3.13.5-linux-x86_64-gnu
cpython-3.13.5+freethreaded-linux-x86_64-gnu
cpython-3.13.5-linux-x86_64_v2-gnu
cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu
cpython-3.13.5-linux-x86_64_v3-gnu
cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu
cpython-3.13.5-linux-x86_64_v4-gnu
cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu
cpython-3.13.5-linux-aarch64-gnu
cpython-3.13.5+freethreaded-linux-aarch64-gnu
cpython-3.13.5-linux-powerpc64le-gnu
cpython-3.13.5+freethreaded-linux-powerpc64le-gnu
cpython-3.13.5-linux-riscv64gc-gnu
cpython-3.13.5+freethreaded-linux-riscv64gc-gnu
cpython-3.13.5-linux-s390x-gnu
cpython-3.13.5+freethreaded-linux-s390x-gnu
cpython-3.12.11-linux-x86_64-gnu
cpython-3.12.11-linux-x86_64_v2-gnu
cpython-3.12.11-linux-x86_64_v3-gnu
cpython-3.12.11-linux-x86_64_v4-gnu
cpython-3.12.11-linux-aarch64-gnu
cpython-3.12.11-linux-powerpc64le-gnu
cpython-3.12.11-linux-riscv64gc-gnu
cpython-3.12.11-linux-s390x-gnu
cpython-3.11.13-linux-x86_64-gnu
cpython-3.11.13-linux-x86_64_v2-gnu
cpython-3.11.13-linux-x86_64_v3-gnu
cpython-3.11.13-linux-x86_64_v4-gnu
cpython-3.11.13-linux-aarch64-gnu
cpython-3.11.13-linux-powerpc64le-gnu
cpython-3.11.13-linux-riscv64gc-gnu
cpython-3.11.13-linux-s390x-gnu
cpython-3.11.5-linux-x86-gnu
cpython-3.10.18-linux-x86_64-gnu
cpython-3.10.18-linux-x86_64_v2-gnu
cpython-3.10.18-linux-x86_64_v3-gnu
cpython-3.10.18-linux-x86_64_v4-gnu
cpython-3.10.18-linux-aarch64-gnu
cpython-3.10.18-linux-powerpc64le-gnu
cpython-3.10.18-linux-riscv64gc-gnu
cpython-3.10.18-linux-s390x-gnu
cpython-3.10.13-linux-x86-gnu
cpython-3.9.23-linux-x86_64-gnu
cpython-3.9.23-linux-x86_64_v2-gnu
cpython-3.9.23-linux-x86_64_v3-gnu
cpython-3.9.23-linux-x86_64_v4-gnu
cpython-3.9.23-linux-aarch64-gnu
cpython-3.9.23-linux-powerpc64le-gnu
cpython-3.9.23-linux-riscv64gc-gnu
cpython-3.9.23-linux-s390x-gnu
cpython-3.9.18-linux-x86-gnu
cpython-3.8.20-linux-x86_64-gnu
cpython-3.8.20-linux-aarch64-gnu
cpython-3.8.17-linux-x86-gnu
pypy-3.11.13-linux-x86_64-gnu
pypy-3.11.13-linux-aarch64-gnu
pypy-3.11.13-linux-x86-gnu
pypy-3.10.16-linux-x86_64-gnu
pypy-3.10.16-linux-aarch64-gnu
pypy-3.10.16-linux-x86-gnu
pypy-3.10.14-linux-s390x-gnu
pypy-3.9.19-linux-x86_64-gnu
pypy-3.9.19-linux-aarch64-gnu
pypy-3.9.19-linux-x86-gnu
pypy-3.9.19-linux-s390x-gnu
pypy-3.8.16-linux-x86_64-gnu
pypy-3.8.16-linux-aarch64-gnu
pypy-3.8.16-linux-x86-gnu
pypy-3.8.16-linux-s390x-gnu
graalpy-3.11.0-linux-x86_64-gnu
graalpy-3.11.0-linux-aarch64-gnu
graalpy-3.10.0-linux-x86_64-gnu
graalpy-3.10.0-linux-aarch64-gnu
graalpy-3.8.5-linux-x86_64-gnu
graalpy-3.8.5-linux-aarch64-gnu
```
---
crates/uv/src/commands/python/list.rs | 2 ++
1 file changed, 2 insertions(+)
diff --git a/crates/uv/src/commands/python/list.rs b/crates/uv/src/commands/python/list.rs
index 2cd54747c..17528a11e 100644
--- a/crates/uv/src/commands/python/list.rs
+++ b/crates/uv/src/commands/python/list.rs
@@ -81,6 +81,8 @@ pub(crate) async fn list(
PythonListKinds::Installed => None,
PythonListKinds::Downloads => Some(if all_platforms {
base_download_request
+ } else if all_arches {
+ base_download_request.fill_platform()?.with_any_arch()
} else {
base_download_request.fill_platform()?
}),
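As a reading aid for the two-line fix above, the following standalone sketch spells out how the three cases combine: `--all-platforms` already implies every architecture, `--all-arches` keeps the current platform but widens the architecture, and the default restricts both. The names are illustrative stand-ins, not uv's actual request builder.

```
// Illustrative only: the flags-to-filter mapping implied by the fix above.
#[derive(Debug, PartialEq)]
struct DownloadFilter {
    any_platform: bool,
    any_arch: bool,
}

fn filter_from_flags(all_platforms: bool, all_arches: bool) -> DownloadFilter {
    if all_platforms {
        // `--all-platforms` already implies every architecture.
        DownloadFilter { any_platform: true, any_arch: true }
    } else if all_arches {
        // `--all-arches`: keep the current platform, widen the architecture.
        DownloadFilter { any_platform: false, any_arch: true }
    } else {
        // Default: current platform, current architecture only.
        DownloadFilter { any_platform: false, any_arch: false }
    }
}

fn main() {
    // The case fixed by this patch: `--only-downloads --all-arches`.
    assert_eq!(
        filter_from_flags(false, true),
        DownloadFilter { any_platform: false, any_arch: true }
    );
}
```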
From bb1e9a247c5e488a712e8f1cc040f025f9751337 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 12:12:36 -0500
Subject: [PATCH 034/130] Update preview installation of Python executables to
be non-fatal (#14612)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Previously, if installation of executables into the bin directory failed,
we'd exit with a non-zero code. However, if we make this behavior the
default, we don't want it to be fatal. There's a `--bin` flag to opt in to
_requiring_ successful executable installation, and a `--no-bin` flag to opt
out of installation entirely (which also silences the warning).
Part of https://github.com/astral-sh/uv/issues/14296 — we need this
before we can stabilize the behavior.
In #14614 we do the same for writing entries to the Windows registry.
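For context, the `--bin`/`--no-bin` pair is collapsed into an `Option<bool>` via the `flag(bin, no_bin, "bin")` call in `settings.rs`. A minimal sketch of that pattern follows; the body is illustrative, not uv's actual helper.

```
// Two mutually exclusive booleans collapse into an `Option<bool>`, where
// `None` means the user expressed no preference.
fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (true, true) => unreachable!("clap's `overrides_with` prevents `--{name}` plus `--no-{name}`"),
    }
}

fn main() {
    assert_eq!(flag(false, false, "bin"), None);       // default: warn on failure
    assert_eq!(flag(true, false, "bin"), Some(true));  // `--bin`: require success
    assert_eq!(flag(false, true, "bin"), Some(false)); // `--no-bin`: skip entirely
}
```

The `None` case is what drives the warn-instead-of-error behavior in the diff below.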
---
crates/uv-cli/src/lib.rs | 15 ++-
crates/uv-python/src/windows_registry.rs | 7 +-
crates/uv/src/commands/python/install.rs | 145 +++++++++++++++++------
crates/uv/src/lib.rs | 2 +
crates/uv/src/settings.rs | 7 ++
crates/uv/tests/it/help.rs | 5 +
crates/uv/tests/it/python_install.rs | 68 ++++++++++-
docs/reference/cli.md | 3 +-
8 files changed, 212 insertions(+), 40 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 0f3652341..70d5322d9 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -4941,6 +4941,19 @@ pub struct PythonInstallArgs {
#[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
pub install_dir: Option,
+ /// Install a Python executable into the `bin` directory.
+ ///
+ /// This is the default behavior. If this flag is provided explicitly, uv will error if the
+ /// executable cannot be installed.
+ ///
+ /// See `UV_PYTHON_BIN_DIR` to customize the target directory.
+ #[arg(long, overrides_with("no_bin"), hide = true)]
+ pub bin: bool,
+
+ /// Do not install a Python executable into the `bin` directory.
+ #[arg(long, overrides_with("bin"), conflicts_with("default"))]
+ pub no_bin: bool,
+
/// The Python version(s) to install.
///
/// If not provided, the requested Python version(s) will be read from the `UV_PYTHON`
@@ -5003,7 +5016,7 @@ pub struct PythonInstallArgs {
/// and `python`.
///
/// If multiple Python versions are requested, uv will exit with an error.
- #[arg(long)]
+ #[arg(long, conflicts_with("no_bin"))]
pub default: bool,
}
diff --git a/crates/uv-python/src/windows_registry.rs b/crates/uv-python/src/windows_registry.rs
index 69e179bbf..7c6f6f307 100644
--- a/crates/uv-python/src/windows_registry.rs
+++ b/crates/uv-python/src/windows_registry.rs
@@ -129,12 +129,13 @@ fn read_registry_entry(company: &str, tag: &str, tag_key: &Key) -> Option,
) -> Result<(), ManagedPep514Error> {
let pointer_width = match installation.key().arch().family().pointer_width() {
Ok(PointerWidth::U32) => 32,
@@ -146,9 +147,7 @@ pub fn create_registry_entry(
}
};
- if let Err(err) = write_registry_entry(installation, pointer_width) {
- errors.push((installation.key().clone(), err.into()));
- }
+ write_registry_entry(installation, pointer_width)?;
Ok(())
}
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index 8c8387d07..b22d6010e 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -135,6 +135,14 @@ impl Changelog {
}
}
+#[derive(Debug, Clone, Copy)]
+enum InstallErrorKind {
+ DownloadUnpack,
+ Bin,
+ #[cfg(windows)]
+ Registry,
+}
+
/// Download and install Python versions.
#[allow(clippy::fn_params_excessive_bools)]
pub(crate) async fn install(
@@ -143,6 +151,7 @@ pub(crate) async fn install(
targets: Vec,
reinstall: bool,
upgrade: bool,
+ bin: Option,
force: bool,
python_install_mirror: Option,
pypy_install_mirror: Option,
@@ -432,12 +441,16 @@ pub(crate) async fn install(
downloaded.push(installation.clone());
}
Err(err) => {
- errors.push((download.key().clone(), anyhow::Error::new(err)));
+ errors.push((
+ InstallErrorKind::DownloadUnpack,
+ download.key().clone(),
+ anyhow::Error::new(err),
+ ));
}
}
}
- let bin = if preview.is_enabled() {
+ let bin_dir = if matches!(bin, Some(true)) || preview.is_enabled() {
Some(python_executable_dir()?)
} else {
None
@@ -460,7 +473,7 @@ pub(crate) async fn install(
continue;
}
- let bin = bin
+ let bin_dir = bin_dir
.as_ref()
.expect("We should have a bin directory with preview enabled")
.as_path();
@@ -468,27 +481,38 @@ pub(crate) async fn install(
let upgradeable = (default || is_default_install)
|| requested_minor_versions.contains(&installation.key().version().python_version());
- create_bin_links(
- installation,
- bin,
- reinstall,
- force,
- default,
- upgradeable,
- upgrade,
- is_default_install,
- first_request,
- &existing_installations,
- &installations,
- &mut changelog,
- &mut errors,
- preview,
- )?;
+ if !matches!(bin, Some(false)) {
+ create_bin_links(
+ installation,
+ bin_dir,
+ reinstall,
+ force,
+ default,
+ upgradeable,
+ upgrade,
+ is_default_install,
+ first_request,
+ &existing_installations,
+ &installations,
+ &mut changelog,
+ &mut errors,
+ preview,
+ );
+ }
if preview.is_enabled() {
#[cfg(windows)]
{
- uv_python::windows_registry::create_registry_entry(installation, &mut errors)?;
+ match uv_python::windows_registry::create_registry_entry(installation) {
+ Ok(()) => {}
+ Err(err) => {
+ errors.push((
+ InstallErrorKind::Registry,
+ installation.key().clone(),
+ err.into(),
+ ));
+ }
+ }
}
}
}
@@ -636,24 +660,47 @@ pub(crate) async fn install(
}
}
- if preview.is_enabled() {
- let bin = bin
+ if preview.is_enabled() && !matches!(bin, Some(false)) {
+ let bin_dir = bin_dir
.as_ref()
.expect("We should have a bin directory with preview enabled")
.as_path();
- warn_if_not_on_path(bin);
+ warn_if_not_on_path(bin_dir);
}
}
if !errors.is_empty() {
- for (key, err) in errors
+ // If there are only bin install errors and the user didn't opt-in, we're only going to warn
+ let fatal = errors
+ .iter()
+ .all(|(kind, _, _)| matches!(kind, InstallErrorKind::Bin))
+ && bin.is_none();
+
+ for (kind, key, err) in errors
.into_iter()
- .sorted_unstable_by(|(key_a, _), (key_b, _)| key_a.cmp(key_b))
+ .sorted_unstable_by(|(_, key_a, _), (_, key_b, _)| key_a.cmp(key_b))
{
+ let (level, verb) = match kind {
+ InstallErrorKind::DownloadUnpack => ("error".red().bold().to_string(), "install"),
+ InstallErrorKind::Bin => {
+ let level = match bin {
+ None => "warning".yellow().bold().to_string(),
+ Some(false) => continue,
+ Some(true) => "error".red().bold().to_string(),
+ };
+ (level, "install executable for")
+ }
+ #[cfg(windows)]
+ InstallErrorKind::Registry => (
+ "error".red().bold().to_string(),
+ "install registry entry for",
+ ),
+ };
+
writeln!(
printer.stderr(),
- "{}: Failed to install {}",
- "error".red().bold(),
+ "{level}{} Failed to {verb} {}",
+ ":".bold(),
key.green()
)?;
for err in err.chain() {
@@ -665,6 +712,11 @@ pub(crate) async fn install(
)?;
}
}
+
+ if fatal {
+ return Ok(ExitStatus::Success);
+ }
+
return Ok(ExitStatus::Failure);
}
@@ -672,6 +724,8 @@ pub(crate) async fn install(
}
/// Link the binaries of a managed Python installation to the bin directory.
+///
+/// This function is fallible, but errors are pushed to `errors` instead of being thrown.
#[allow(clippy::fn_params_excessive_bools)]
fn create_bin_links(
installation: &ManagedPythonInstallation,
@@ -686,9 +740,9 @@ fn create_bin_links(
existing_installations: &[ManagedPythonInstallation],
installations: &[&ManagedPythonInstallation],
changelog: &mut Changelog,
- errors: &mut Vec<(PythonInstallationKey, Error)>,
+ errors: &mut Vec<(InstallErrorKind, PythonInstallationKey, Error)>,
preview: PreviewMode,
-) -> Result<(), Error> {
+) {
let targets =
if (default || is_default_install) && first_request.matches_installation(installation) {
vec![
@@ -773,6 +827,7 @@ fn create_bin_links(
);
} else {
errors.push((
+ InstallErrorKind::Bin,
installation.key().clone(),
anyhow::anyhow!(
"Executable already exists at `{}` but is not managed by uv; use `--force` to replace it",
@@ -848,7 +903,17 @@ fn create_bin_links(
}
// Replace the existing link
- fs_err::remove_file(&to)?;
+ if let Err(err) = fs_err::remove_file(&to) {
+ errors.push((
+ InstallErrorKind::Bin,
+ installation.key().clone(),
+ anyhow::anyhow!(
+ "Executable already exists at `{}` but could not be removed: {err}",
+ to.simplified_display()
+ ),
+ ));
+ continue;
+ }
if let Some(existing) = existing {
// Ensure we do not report installation of this executable for an existing
@@ -860,7 +925,18 @@ fn create_bin_links(
.remove(&target);
}
- create_link_to_executable(&target, executable)?;
+ if let Err(err) = create_link_to_executable(&target, executable) {
+ errors.push((
+ InstallErrorKind::Bin,
+ installation.key().clone(),
+ anyhow::anyhow!(
+ "Failed to create link at `{}`: {err}",
+ target.simplified_display()
+ ),
+ ));
+ continue;
+ }
+
debug!(
"Updated executable at `{}` to {}",
target.simplified_display(),
@@ -874,11 +950,14 @@ fn create_bin_links(
.insert(target.clone());
}
Err(err) => {
- errors.push((installation.key().clone(), anyhow::Error::new(err)));
+ errors.push((
+ InstallErrorKind::Bin,
+ installation.key().clone(),
+ anyhow::Error::new(err),
+ ));
}
}
}
- Ok(())
}
pub(crate) fn format_executables(
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 0b4d0bb82..3a700b965 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1402,6 +1402,7 @@ async fn run(mut cli: Cli) -> Result {
args.targets,
args.reinstall,
upgrade,
+ args.bin,
args.force,
args.python_install_mirror,
args.pypy_install_mirror,
@@ -1430,6 +1431,7 @@ async fn run(mut cli: Cli) -> Result {
args.targets,
reinstall,
upgrade,
+ args.bin,
args.force,
args.python_install_mirror,
args.pypy_install_mirror,
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index 8a325d538..d373250ac 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -933,6 +933,7 @@ pub(crate) struct PythonInstallSettings {
pub(crate) targets: Vec,
pub(crate) reinstall: bool,
pub(crate) force: bool,
+ pub(crate) bin: Option,
pub(crate) python_install_mirror: Option,
pub(crate) pypy_install_mirror: Option,
pub(crate) python_downloads_json_url: Option,
@@ -961,6 +962,8 @@ impl PythonInstallSettings {
install_dir,
targets,
reinstall,
+ bin,
+ no_bin,
force,
mirror: _,
pypy_mirror: _,
@@ -973,6 +976,7 @@ impl PythonInstallSettings {
targets,
reinstall,
force,
+ bin: flag(bin, no_bin, "bin"),
python_install_mirror: python_mirror,
pypy_install_mirror: pypy_mirror,
python_downloads_json_url,
@@ -992,6 +996,7 @@ pub(crate) struct PythonUpgradeSettings {
pub(crate) pypy_install_mirror: Option,
pub(crate) python_downloads_json_url: Option,
pub(crate) default: bool,
+ pub(crate) bin: Option,
}
impl PythonUpgradeSettings {
@@ -1013,6 +1018,7 @@ impl PythonUpgradeSettings {
args.python_downloads_json_url.or(python_downloads_json_url);
let force = false;
let default = false;
+ let bin = None;
let PythonUpgradeArgs {
install_dir,
@@ -1030,6 +1036,7 @@ impl PythonUpgradeSettings {
pypy_install_mirror: pypy_mirror,
python_downloads_json_url,
default,
+ bin,
}
}
}
diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs
index 8faebd040..a6230108c 100644
--- a/crates/uv/tests/it/help.rs
+++ b/crates/uv/tests/it/help.rs
@@ -504,6 +504,9 @@ fn help_subsubcommand() {
[env: UV_PYTHON_INSTALL_DIR=]
+ --no-bin
+ Do not install a Python executable into the `bin` directory
+
--mirror
Set the URL to use as the source for downloading Python installations.
@@ -790,6 +793,8 @@ fn help_flag_subsubcommand() {
Options:
-i, --install-dir
The directory to store the Python installation in [env: UV_PYTHON_INSTALL_DIR=]
+ --no-bin
+ Do not install a Python executable into the `bin` directory
--mirror
Set the URL to use as the source for downloading Python installations [env:
UV_PYTHON_INSTALL_MIRROR=]
diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs
index bd723e5d1..0cb952054 100644
--- a/crates/uv/tests/it/python_install.rs
+++ b/crates/uv/tests/it/python_install.rs
@@ -430,15 +430,35 @@ fn python_install_preview() {
bin_python.touch().unwrap();
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: Failed to install executable for cpython-3.13.5-[PLATFORM]
+ Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it
+ ");
+
+ // With `--bin`, this should error instead of warn
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--bin").arg("3.13"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
- error: Failed to install cpython-3.13.5-[PLATFORM]
+ error: Failed to install executable for cpython-3.13.5-[PLATFORM]
Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it
");
+ // With `--no-bin`, this should be silent
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ ");
+
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--force").arg("3.13"), @r"
success: true
exit_code: 0
@@ -565,6 +585,52 @@ fn python_install_preview() {
}
}
+#[test]
+fn python_install_preview_no_bin() {
+ let context: TestContext = TestContext::new_with_versions(&[])
+ .with_filtered_python_keys()
+ .with_filtered_exe_suffix()
+ .with_managed_python_dirs();
+
+ // Install the latest version
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM]
+ ");
+
+ let bin_python = context
+ .bin_dir
+ .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX));
+
+ // The executable should not be installed in the bin directory
+ bin_python.assert(predicate::path::missing());
+
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("--default"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: the argument '--no-bin' cannot be used with '--default'
+
+ Usage: uv python install --no-bin --install-dir [TARGETS]...
+
+ For more information, try '--help'.
+ ");
+
+ let bin_python = context
+ .bin_dir
+ .child(format!("python{}", std::env::consts::EXE_SUFFIX));
+
+ // The executable should not be installed in the bin directory
+ bin_python.assert(predicate::path::missing());
+}
+
#[test]
fn python_install_preview_upgrade() {
let context = TestContext::new_with_versions(&[])
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 13df63c19..93d928518 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -2795,7 +2795,8 @@ uv python install [OPTIONS] [TARGETS]...
May also be set with the UV_PYTHON_INSTALL_MIRROR environment variable.
--native-tlsWhether to load TLS certificates from the platform's native certificate store.
By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).
However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.
-May also be set with the UV_NATIVE_TLS environment variable.
--no-cache , --no-cache-dir, -nAvoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation
+May also be set with the UV_NATIVE_TLS environment variable.
--no-binDo not install a Python executable into the bin directory
+--no-cache , --no-cache-dir, -nAvoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation
May also be set with the UV_NO_CACHE environment variable.
--no-configAvoid discovering configuration files (pyproject.toml, uv.toml).
Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.
May also be set with the UV_NO_CONFIG environment variable.
--no-managed-pythonDisable use of uv-managed Python versions.
From d2c81e503f19cf63bd335e8a08df1bf6b542dac0 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 12:29:11 -0500
Subject: [PATCH 035/130] Make preview Python registration on Windows non-fatal
(#14614)
Same as #14612 for registration with the Windows Registry.
---
crates/uv-cli/src/lib.rs | 11 ++++++++
crates/uv/src/commands/python/install.rs | 32 ++++++++++++++----------
crates/uv/src/lib.rs | 2 ++
crates/uv/src/settings.rs | 7 ++++++
crates/uv/tests/it/help.rs | 5 ++++
docs/reference/cli.md | 1 +
6 files changed, 45 insertions(+), 13 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 70d5322d9..2efb30724 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -4954,6 +4954,17 @@ pub struct PythonInstallArgs {
#[arg(long, overrides_with("bin"), conflicts_with("default"))]
pub no_bin: bool,
+ /// Register the Python installation in the Windows registry.
+ ///
+ /// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the
+ /// registry entry cannot be created.
+ #[arg(long, overrides_with("no_registry"), hide = true)]
+ pub registry: bool,
+
+ /// Do not register the Python installation in the Windows registry.
+ #[arg(long, overrides_with("registry"))]
+ pub no_registry: bool,
+
/// The Python version(s) to install.
///
/// If not provided, the requested Python version(s) will be read from the `UV_PYTHON`
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index b22d6010e..bbab7cbb1 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -152,6 +152,7 @@ pub(crate) async fn install(
reinstall: bool,
upgrade: bool,
bin: Option,
+ registry: Option,
force: bool,
python_install_mirror: Option,
pypy_install_mirror: Option,
@@ -500,7 +501,7 @@ pub(crate) async fn install(
);
}
- if preview.is_enabled() {
+ if preview.is_enabled() && !matches!(registry, Some(false)) {
#[cfg(windows)]
{
match uv_python::windows_registry::create_registry_entry(installation) {
@@ -670,11 +671,14 @@ pub(crate) async fn install(
}
if !errors.is_empty() {
- // If there are only bin install errors and the user didn't opt-in, we're only going to warn
- let fatal = errors
- .iter()
- .all(|(kind, _, _)| matches!(kind, InstallErrorKind::Bin))
- && bin.is_none();
+ // If there are only side-effect install errors and the user didn't opt-in, we're only going
+ // to warn
+ let fatal = !errors.iter().all(|(kind, _, _)| match kind {
+ InstallErrorKind::Bin => bin.is_none(),
+ #[cfg(windows)]
+ InstallErrorKind::Registry => registry.is_none(),
+ InstallErrorKind::DownloadUnpack => false,
+ });
for (kind, key, err) in errors
.into_iter()
@@ -691,10 +695,14 @@ pub(crate) async fn install(
(level, "install executable for")
}
#[cfg(windows)]
- InstallErrorKind::Registry => (
- "error".red().bold().to_string(),
- "install registry entry for",
- ),
+ InstallErrorKind::Registry => {
+ let level = match registry {
+ None => "warning".yellow().bold().to_string(),
+ Some(false) => continue,
+ Some(true) => "error".red().bold().to_string(),
+ };
+ (level, "install registry entry for")
+ }
};
writeln!(
@@ -714,10 +722,8 @@ pub(crate) async fn install(
}
if fatal {
- return Ok(ExitStatus::Success);
+ return Ok(ExitStatus::Failure);
}
-
- return Ok(ExitStatus::Failure);
}
Ok(ExitStatus::Success)
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 3a700b965..e6fea035f 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1403,6 +1403,7 @@ async fn run(mut cli: Cli) -> Result {
args.reinstall,
upgrade,
args.bin,
+ args.registry,
args.force,
args.python_install_mirror,
args.pypy_install_mirror,
@@ -1432,6 +1433,7 @@ async fn run(mut cli: Cli) -> Result {
reinstall,
upgrade,
args.bin,
+ args.registry,
args.force,
args.python_install_mirror,
args.pypy_install_mirror,
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index d373250ac..b221f0f5d 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -934,6 +934,7 @@ pub(crate) struct PythonInstallSettings {
pub(crate) reinstall: bool,
pub(crate) force: bool,
pub(crate) bin: Option,
+ pub(crate) registry: Option,
pub(crate) python_install_mirror: Option,
pub(crate) pypy_install_mirror: Option,
pub(crate) python_downloads_json_url: Option,
@@ -964,6 +965,8 @@ impl PythonInstallSettings {
reinstall,
bin,
no_bin,
+ registry,
+ no_registry,
force,
mirror: _,
pypy_mirror: _,
@@ -977,6 +980,7 @@ impl PythonInstallSettings {
reinstall,
force,
bin: flag(bin, no_bin, "bin"),
+ registry: flag(registry, no_registry, "registry"),
python_install_mirror: python_mirror,
pypy_install_mirror: pypy_mirror,
python_downloads_json_url,
@@ -992,6 +996,7 @@ pub(crate) struct PythonUpgradeSettings {
pub(crate) install_dir: Option,
pub(crate) targets: Vec,
pub(crate) force: bool,
+ pub(crate) registry: Option,
pub(crate) python_install_mirror: Option,
pub(crate) pypy_install_mirror: Option,
pub(crate) python_downloads_json_url: Option,
@@ -1019,6 +1024,7 @@ impl PythonUpgradeSettings {
let force = false;
let default = false;
let bin = None;
+ let registry = None;
let PythonUpgradeArgs {
install_dir,
@@ -1032,6 +1038,7 @@ impl PythonUpgradeSettings {
install_dir,
targets,
force,
+ registry,
python_install_mirror: python_mirror,
pypy_install_mirror: pypy_mirror,
python_downloads_json_url,
diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs
index a6230108c..a557b0eff 100644
--- a/crates/uv/tests/it/help.rs
+++ b/crates/uv/tests/it/help.rs
@@ -507,6 +507,9 @@ fn help_subsubcommand() {
--no-bin
Do not install a Python executable into the `bin` directory
+ --no-registry
+ Do not register the Python installation in the Windows registry
+
--mirror
Set the URL to use as the source for downloading Python installations.
@@ -795,6 +798,8 @@ fn help_flag_subsubcommand() {
The directory to store the Python installation in [env: UV_PYTHON_INSTALL_DIR=]
--no-bin
Do not install a Python executable into the `bin` directory
+ --no-registry
+ Do not register the Python installation in the Windows registry
--mirror
Set the URL to use as the source for downloading Python installations [env:
UV_PYTHON_INSTALL_MIRROR=]
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 93d928518..f6bc028df 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -2804,6 +2804,7 @@ uv python install [OPTIONS] [TARGETS]...
May also be set with the UV_NO_MANAGED_PYTHON environment variable.
--no-progressHide all progress outputs.
For example, spinners or progress bars.
May also be set with the UV_NO_PROGRESS environment variable.
--no-python-downloadsDisable automatic downloads of Python.
+--no-registryDo not register the Python installation in the Windows registry
--offlineDisable network access.
When disabled, uv will only use locally cached data and locally available files.
May also be set with the UV_OFFLINE environment variable.
--project project Run the command within the given project directory.
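The severity rule introduced across this patch and the previous one boils down to a single predicate: an installation failure is fatal unless every error is a side effect (bin link or registry entry) that the user did not explicitly opt into. A self-contained sketch, using a stand-in enum rather than uv's actual types (where `Registry` is Windows-only):

```
enum InstallErrorKind {
    DownloadUnpack,
    Bin,
    Registry,
}

/// An install failure is fatal unless *every* error is a side effect the user
/// did not explicitly opt into (`--bin` / `--registry` left unspecified).
fn is_fatal(errors: &[InstallErrorKind], bin: Option<bool>, registry: Option<bool>) -> bool {
    !errors.iter().all(|kind| match kind {
        InstallErrorKind::Bin => bin.is_none(),
        InstallErrorKind::Registry => registry.is_none(),
        InstallErrorKind::DownloadUnpack => false,
    })
}

fn main() {
    // Bin-link failure without `--bin`: warn and exit successfully.
    assert!(!is_fatal(&[InstallErrorKind::Bin], None, None));
    // The same failure with an explicit `--bin`: hard error.
    assert!(is_fatal(&[InstallErrorKind::Bin], Some(true), None));
    // Registry failure without `--registry`: warn only.
    assert!(!is_fatal(&[InstallErrorKind::Registry], None, None));
    // Download/unpack failures are always fatal.
    assert!(is_fatal(&[InstallErrorKind::DownloadUnpack], None, None));
}
```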
From c226d66f35b17df57018015c5f9c21d446d51849 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 12:55:57 -0500
Subject: [PATCH 036/130] Rename "Dependency specifiers" section to exclude PEP
508 reference (#14631)
---
docs/concepts/projects/dependencies.md | 8 ++++----
docs/js/extra.js | 2 ++
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md
index 2eabbf4dc..022db4d7e 100644
--- a/docs/concepts/projects/dependencies.md
+++ b/docs/concepts/projects/dependencies.md
@@ -808,12 +808,12 @@ Or, to opt-out of using an editable dependency in a workspace:
$ uv add --no-editable ./path/foo
```
-## Dependency specifiers (PEP 508)
+## Dependency specifiers
-uv uses
+uv uses standard
[dependency specifiers](https://packaging.python.org/en/latest/specifications/dependency-specifiers/),
-previously known as [PEP 508](https://peps.python.org/pep-0508/). A dependency specifier is composed
-of, in order:
+originally defined in [PEP 508](https://peps.python.org/pep-0508/). A dependency specifier is
+composed of, in order:
- The dependency name
- The extras you want (optional)
diff --git a/docs/js/extra.js b/docs/js/extra.js
index bfb34c7fa..58a71e98d 100644
--- a/docs/js/extra.js
+++ b/docs/js/extra.js
@@ -78,6 +78,8 @@ document$.subscribe(function () {
"concepts/projects/#building-projects": "concepts/projects/build/",
"concepts/projects/#build-isolation":
"concepts/projects/config/#build-isolation",
+ "concepts/projects/dependencies/#dependency-specifiers-pep-508":
+ "concepts/projects/dependencies/#dependency-specifiers",
};
// The prefix for the site, see `site_dir` in `mkdocs.yml`
From d5257202662773b6794b1b2de6c490dd1404d7b5 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 13:47:02 -0500
Subject: [PATCH 037/130] Add `uv python update-shell` (#14627)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Part of #14296
This is the same as `uv tool update-shell` but handles the case where
the Python bin directory is configured to a different path.
```
❯ UV_PYTHON_BIN_DIR=/tmp/foo cargo run -q -- python install --preview 3.13.3
Installed Python 3.13.3 in 1.75s
+ cpython-3.13.3-macos-aarch64-none
warning: `/tmp/foo` is not on your PATH. To use installed Python executables, run `export PATH="/tmp/foo:$PATH"` or `uv python update-shell`.
❯ UV_PYTHON_BIN_DIR=/tmp/foo cargo run -q -- python update-shell
Created configuration file: /Users/zb/.zshenv
Restart your shell to apply changes
❯ cat /Users/zb/.zshenv
# uv
export PATH="/tmp/foo:$PATH"
❯ UV_TOOL_BIN_DIR=/tmp/bar cargo run -q -- tool update-shell
Updated configuration file: /Users/zb/.zshenv
Restart your shell to apply changes
❯ cat /Users/zb/.zshenv
# uv
export PATH="/tmp/foo:$PATH"
# uv
export PATH="/tmp/bar:$PATH"
```
---
crates/uv-cli/src/lib.rs | 13 ++
crates/uv/src/commands/mod.rs | 1 +
crates/uv/src/commands/python/install.rs | 23 ++-
crates/uv/src/commands/python/mod.rs | 1 +
crates/uv/src/commands/python/update_shell.rs | 153 ++++++++++++++++++
crates/uv/src/lib.rs | 6 +
crates/uv/tests/it/help.rs | 35 ++--
docs/reference/cli.md | 65 ++++++++
8 files changed, 274 insertions(+), 23 deletions(-)
create mode 100644 crates/uv/src/commands/python/update_shell.rs
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 2efb30724..a846aec59 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -4856,6 +4856,19 @@ pub enum PythonCommand {
/// Uninstall Python versions.
Uninstall(PythonUninstallArgs),
+
+ /// Ensure that the Python executable directory is on the `PATH`.
+ ///
+ /// If the Python executable directory is not present on the `PATH`, uv will attempt to add it to
+ /// the relevant shell configuration files.
+ ///
+ /// If the shell configuration files already include a blurb to add the executable directory to
+ /// the path, but the directory is not present on the `PATH`, uv will exit with an error.
+ ///
+ /// The Python executable directory is determined according to the XDG standard and can be
+ /// retrieved with `uv python dir --bin`.
+ #[command(alias = "ensurepath")]
+ UpdateShell,
}
#[derive(Args)]
diff --git a/crates/uv/src/commands/mod.rs b/crates/uv/src/commands/mod.rs
index d1e647363..405aad955 100644
--- a/crates/uv/src/commands/mod.rs
+++ b/crates/uv/src/commands/mod.rs
@@ -38,6 +38,7 @@ pub(crate) use python::install::install as python_install;
pub(crate) use python::list::list as python_list;
pub(crate) use python::pin::pin as python_pin;
pub(crate) use python::uninstall::uninstall as python_uninstall;
+pub(crate) use python::update_shell::update_shell as python_update_shell;
#[cfg(feature = "self-update")]
pub(crate) use self_update::self_update;
pub(crate) use tool::dir::dir as tool_dir;
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index bbab7cbb1..feb0cf7c7 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -993,20 +993,29 @@ fn warn_if_not_on_path(bin: &Path) {
if !Shell::contains_path(bin) {
if let Some(shell) = Shell::from_env() {
if let Some(command) = shell.prepend_path(bin) {
- warn_user!(
- "`{}` is not on your PATH. To use the installed Python executable, run `{}`.",
- bin.simplified_display().cyan(),
- command.green(),
- );
+ if shell.supports_update() {
+ warn_user!(
+ "`{}` is not on your PATH. To use installed Python executables, run `{}` or `{}`.",
+ bin.simplified_display().cyan(),
+ command.green(),
+ "uv python update-shell".green()
+ );
+ } else {
+ warn_user!(
+ "`{}` is not on your PATH. To use installed Python executables, run `{}`.",
+ bin.simplified_display().cyan(),
+ command.green()
+ );
+ }
} else {
warn_user!(
- "`{}` is not on your PATH. To use the installed Python executable, add the directory to your PATH.",
+ "`{}` is not on your PATH. To use installed Python executables, add the directory to your PATH.",
bin.simplified_display().cyan(),
);
}
} else {
warn_user!(
- "`{}` is not on your PATH. To use the installed Python executable, add the directory to your PATH.",
+ "`{}` is not on your PATH. To use installed Python executables, add the directory to your PATH.",
bin.simplified_display().cyan(),
);
}
diff --git a/crates/uv/src/commands/python/mod.rs b/crates/uv/src/commands/python/mod.rs
index afc700d23..6f7a5c980 100644
--- a/crates/uv/src/commands/python/mod.rs
+++ b/crates/uv/src/commands/python/mod.rs
@@ -4,6 +4,7 @@ pub(crate) mod install;
pub(crate) mod list;
pub(crate) mod pin;
pub(crate) mod uninstall;
+pub(crate) mod update_shell;
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(super) enum ChangeEventKind {
diff --git a/crates/uv/src/commands/python/update_shell.rs b/crates/uv/src/commands/python/update_shell.rs
new file mode 100644
index 000000000..18757ff9e
--- /dev/null
+++ b/crates/uv/src/commands/python/update_shell.rs
@@ -0,0 +1,153 @@
+#![cfg_attr(windows, allow(unreachable_code))]
+
+use std::fmt::Write;
+
+use anyhow::Result;
+use owo_colors::OwoColorize;
+use tokio::io::AsyncWriteExt;
+use tracing::debug;
+
+use uv_fs::Simplified;
+use uv_python::managed::python_executable_dir;
+use uv_shell::Shell;
+
+use crate::commands::ExitStatus;
+use crate::printer::Printer;
+
+/// Ensure that the executable directory is in PATH.
+pub(crate) async fn update_shell(printer: Printer) -> Result {
+ let executable_directory = python_executable_dir()?;
+ debug!(
+ "Ensuring that the executable directory is in PATH: {}",
+ executable_directory.simplified_display()
+ );
+
+ #[cfg(windows)]
+ {
+ if uv_shell::windows::prepend_path(&executable_directory)? {
+ writeln!(
+ printer.stderr(),
+ "Updated PATH to include executable directory {}",
+ executable_directory.simplified_display().cyan()
+ )?;
+ writeln!(printer.stderr(), "Restart your shell to apply changes")?;
+ } else {
+ writeln!(
+ printer.stderr(),
+ "Executable directory {} is already in PATH",
+ executable_directory.simplified_display().cyan()
+ )?;
+ }
+
+ return Ok(ExitStatus::Success);
+ }
+
+ if Shell::contains_path(&executable_directory) {
+ writeln!(
+ printer.stderr(),
+ "Executable directory {} is already in PATH",
+ executable_directory.simplified_display().cyan()
+ )?;
+ return Ok(ExitStatus::Success);
+ }
+
+ // Determine the current shell.
+ let Some(shell) = Shell::from_env() else {
+ return Err(anyhow::anyhow!(
+ "The executable directory {} is not in PATH, but the current shell could not be determined",
+ executable_directory.simplified_display().cyan()
+ ));
+ };
+
+ // Look up the configuration files (e.g., `.bashrc`, `.zshrc`) for the shell.
+ let files = shell.configuration_files();
+ if files.is_empty() {
+ return Err(anyhow::anyhow!(
+ "The executable directory {} is not in PATH, but updating {shell} is currently unsupported",
+ executable_directory.simplified_display().cyan()
+ ));
+ }
+
+ // Prepare the command (e.g., `export PATH="$HOME/.cargo/bin:$PATH"`).
+ let Some(command) = shell.prepend_path(&executable_directory) else {
+ return Err(anyhow::anyhow!(
+ "The executable directory {} is not in PATH, but the necessary command to update {shell} could not be determined",
+ executable_directory.simplified_display().cyan()
+ ));
+ };
+
+ // Update each file, as necessary.
+ let mut updated = false;
+ for file in files {
+ // Search for the command in the file, to avoid redundant updates.
+ match fs_err::tokio::read_to_string(&file).await {
+ Ok(contents) => {
+ if contents
+ .lines()
+ .map(str::trim)
+ .filter(|line| !line.starts_with('#'))
+ .any(|line| line.contains(&command))
+ {
+ debug!(
+ "Skipping already-updated configuration file: {}",
+ file.simplified_display()
+ );
+ continue;
+ }
+
+ // Append the command to the file.
+ fs_err::tokio::OpenOptions::new()
+ .create(true)
+ .truncate(true)
+ .write(true)
+ .open(&file)
+ .await?
+ .write_all(format!("{contents}\n# uv\n{command}\n").as_bytes())
+ .await?;
+
+ writeln!(
+ printer.stderr(),
+ "Updated configuration file: {}",
+ file.simplified_display().cyan()
+ )?;
+ updated = true;
+ }
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+ // Ensure that the directory containing the file exists.
+ if let Some(parent) = file.parent() {
+ fs_err::tokio::create_dir_all(&parent).await?;
+ }
+
+ // Append the command to the file.
+ fs_err::tokio::OpenOptions::new()
+ .create(true)
+ .truncate(true)
+ .write(true)
+ .open(&file)
+ .await?
+ .write_all(format!("# uv\n{command}\n").as_bytes())
+ .await?;
+
+ writeln!(
+ printer.stderr(),
+ "Created configuration file: {}",
+ file.simplified_display().cyan()
+ )?;
+ updated = true;
+ }
+ Err(err) => {
+ return Err(err.into());
+ }
+ }
+ }
+
+ if updated {
+ writeln!(printer.stderr(), "Restart your shell to apply changes")?;
+ Ok(ExitStatus::Success)
+ } else {
+ Err(anyhow::anyhow!(
+ "The executable directory {} is not in PATH, but the {shell} configuration files are already up-to-date",
+ executable_directory.simplified_display().cyan()
+ ))
+ }
+}
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index e6fea035f..384f48ac4 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1537,6 +1537,12 @@ async fn run(mut cli: Cli) -> Result {
commands::python_dir(args.bin)?;
Ok(ExitStatus::Success)
}
+ Commands::Python(PythonNamespace {
+ command: PythonCommand::UpdateShell,
+ }) => {
+ commands::python_update_shell(printer).await?;
+ Ok(ExitStatus::Success)
+ }
Commands::Publish(args) => {
show_settings!(args);
diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs
index a557b0eff..39de4c6f9 100644
--- a/crates/uv/tests/it/help.rs
+++ b/crates/uv/tests/it/help.rs
@@ -290,14 +290,15 @@ fn help_subcommand() {
Usage: uv python [OPTIONS]
Commands:
- list List the available Python installations
- install Download and install Python versions
- upgrade Upgrade installed Python versions to the latest supported patch release (requires the
- `--preview` flag)
- find Search for a Python installation
- pin Pin to a specific Python version
- dir Show the uv Python installation directory
- uninstall Uninstall Python versions
+ list List the available Python installations
+ install Download and install Python versions
+ upgrade Upgrade installed Python versions to the latest supported patch release (requires
+ the `--preview` flag)
+ find Search for a Python installation
+ pin Pin to a specific Python version
+ dir Show the uv Python installation directory
+ uninstall Uninstall Python versions
+ update-shell Ensure that the Python executable directory is on the `PATH`
Cache options:
-n, --no-cache
@@ -725,14 +726,15 @@ fn help_flag_subcommand() {
Usage: uv python [OPTIONS]
Commands:
- list List the available Python installations
- install Download and install Python versions
- upgrade Upgrade installed Python versions to the latest supported patch release (requires the
- `--preview` flag)
- find Search for a Python installation
- pin Pin to a specific Python version
- dir Show the uv Python installation directory
- uninstall Uninstall Python versions
+ list List the available Python installations
+ install Download and install Python versions
+ upgrade Upgrade installed Python versions to the latest supported patch release (requires
+ the `--preview` flag)
+ find Search for a Python installation
+ pin Pin to a specific Python version
+ dir Show the uv Python installation directory
+ uninstall Uninstall Python versions
+ update-shell Ensure that the Python executable directory is on the `PATH`
Cache options:
-n, --no-cache Avoid reading from or writing to the cache, instead using a temporary
@@ -934,6 +936,7 @@ fn help_unknown_subsubcommand() {
pin
dir
uninstall
+ update-shell
");
}
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index f6bc028df..66c46ae0c 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -2633,6 +2633,7 @@ uv python [OPTIONS]
uv python pinPin to a specific Python version
uv python dirShow the uv Python installation directory
uv python uninstallUninstall Python versions
+uv python update-shellEnsure that the Python executable directory is on the PATH
### uv python list
@@ -3206,6 +3207,70 @@ uv python uninstall [OPTIONS] ...
You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives )
+### uv python update-shell
+
+Ensure that the Python executable directory is on the `PATH`.
+
+If the Python executable directory is not present on the `PATH`, uv will attempt to add it to the relevant shell configuration files.
+
+If the shell configuration files already include a blurb to add the executable directory to the path, but the directory is not present on the `PATH`, uv will exit with an error.
+
+The Python executable directory is determined according to the XDG standard and can be retrieved with `uv python dir --bin`.
+
+Usage
+
+```
+uv python update-shell [OPTIONS]
+```
+
+Options
+
+--allow-insecure-host , --trusted-host allow-insecure-host Allow insecure connections to a host.
+Can be provided multiple times.
+Expects to receive either a hostname (e.g., localhost), a host-port pair (e.g., localhost:8080), or a URL (e.g., https://localhost).
+WARNING: Hosts included in this list will not be verified against the system's certificate store. Only use --allow-insecure-host in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.
+May also be set with the UV_INSECURE_HOST environment variable.
--cache-dir cache-dir Path to the cache directory.
+Defaults to $XDG_CACHE_HOME/uv or $HOME/.cache/uv on macOS and Linux, and %LOCALAPPDATA%\uv\cache on Windows.
+To view the location of the cache directory, run uv cache dir.
+May also be set with the UV_CACHE_DIR environment variable.
--color color-choice Control the use of color in output.
+By default, uv will automatically detect support for colors when writing to a terminal.
+Possible values:
+
+auto: Enables colored output only when the output is going to a terminal or TTY with support
+always: Enables colored output regardless of the detected environment
+never: Disables colored output
+ --config-file config-file The path to a uv.toml file to use for configuration.
+While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.
+May also be set with the UV_CONFIG_FILE environment variable.
--directory directory Change to the given directory prior to running the command.
+Relative paths are resolved with the given directory as the base.
+See --project to only change the project root directory.
+--help , -hDisplay the concise help for this command
+--managed-pythonRequire use of uv-managed Python versions.
+By default, uv prefers using Python versions it manages. However, it will use system Python versions if a uv-managed Python is not installed. This option disables use of system Python versions.
+May also be set with the UV_MANAGED_PYTHON environment variable.
--native-tlsWhether to load TLS certificates from the platform's native certificate store.
+By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).
+However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.
+May also be set with the UV_NATIVE_TLS environment variable.
--no-cache , --no-cache-dir, -nAvoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation
+May also be set with the UV_NO_CACHE environment variable.
--no-configAvoid discovering configuration files (pyproject.toml, uv.toml).
+Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.
+May also be set with the UV_NO_CONFIG environment variable.
--no-managed-pythonDisable use of uv-managed Python versions.
+Instead, uv will search for a suitable Python version on the system.
+May also be set with the UV_NO_MANAGED_PYTHON environment variable.
--no-progressHide all progress outputs.
+For example, spinners or progress bars.
+May also be set with the UV_NO_PROGRESS environment variable.
--no-python-downloadsDisable automatic downloads of Python.
+--offlineDisable network access.
+When disabled, uv will only use locally cached data and locally available files.
+May also be set with the UV_OFFLINE environment variable.
--project project Run the command within the given project directory.
+All pyproject.toml, uv.toml, and .python-version files will be discovered by walking up the directory tree from the project root, as will the project's virtual environment (.venv).
+Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.
+See --directory to change the working directory entirely.
+This setting has no effect when used in the uv pip interface.
+May also be set with the UV_PROJECT environment variable.
--quiet , -qUse quiet output.
+Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.
+--verbose , -vUse verbose output.
+You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives )
+
+
## uv pip
Manage Python packages with a pip-compatible interface
From ab2bd0179bac888188b69524a20f8d336e439a64 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 14:35:54 -0500
Subject: [PATCH 038/130] Mention the `revision` in the lockfile versioning doc
(#14634)
---
docs/concepts/resolution.md | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/concepts/resolution.md b/docs/concepts/resolution.md
index ec28d71a3..e857e7b1d 100644
--- a/docs/concepts/resolution.md
+++ b/docs/concepts/resolution.md
@@ -535,3 +535,7 @@ The schema version is considered part of the public API, and so is only bumped i
a breaking change (see [Versioning](../reference/policies/versioning.md)). As such, all uv patch
versions within a given minor uv release are guaranteed to have full lockfile compatibility. In
other words, lockfiles may only be rejected across minor releases.
+
+The `revision` field of the lockfile is used to track backwards-compatible changes to the lockfile,
+such as adding a new field to distributions. Changes to the revision will not cause older versions
+of uv to error.
From 863e73a841ca6360a611196e9c5e3ccee894f9d8 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 16:47:35 -0500
Subject: [PATCH 039/130] Skip Windows Python interpreters that return a broken
MSIX package code (#14636)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Currently, we treat all spawn failures as fatal because they indicate a
broken interpreter. In this case, I think we should just skip these
broken interpreters, though I don't know the root cause of the breakage
yet.
Closes https://github.com/astral-sh/uv/issues/14637
See
https://discord.com/channels/1039017663004942429/1039017663512449056/1394758502647333025
---
crates/uv-python/src/discovery.rs | 8 ++++++++
crates/uv-python/src/interpreter.rs | 31 +++++++++++++++++++++++++----
2 files changed, 35 insertions(+), 4 deletions(-)
diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs
index 67f8f37ff..c067082dd 100644
--- a/crates/uv-python/src/discovery.rs
+++ b/crates/uv-python/src/discovery.rs
@@ -884,6 +884,14 @@ impl Error {
);
false
}
+ #[cfg(windows)]
+ InterpreterError::CorruptWindowsPackage { path, err } => {
+ debug!(
+ "Skipping bad interpreter at {} from {source}: {err}",
+ path.display()
+ );
+ false
+ }
InterpreterError::NotFound(path)
| InterpreterError::BrokenSymlink(BrokenSymlink { path, .. }) => {
// If the interpreter is from an active, valid virtual environment, we should
diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs
index 0f074ebb6..fc5adb833 100644
--- a/crates/uv-python/src/interpreter.rs
+++ b/crates/uv-python/src/interpreter.rs
@@ -34,6 +34,9 @@ use crate::{
VirtualEnvironment,
};
+#[cfg(windows)]
+use windows_sys::Win32::Foundation::{APPMODEL_ERROR_NO_PACKAGE, ERROR_CANT_ACCESS_FILE};
+
/// A Python executable and its associated platform markers.
#[derive(Debug, Clone)]
pub struct Interpreter {
@@ -760,6 +763,13 @@ pub enum Error {
#[source]
err: io::Error,
},
+ #[cfg(windows)]
+ #[error("Failed to query Python interpreter at `{path}`")]
+ CorruptWindowsPackage {
+ path: PathBuf,
+ #[source]
+ err: io::Error,
+ },
#[error("{0}")]
UnexpectedResponse(UnexpectedResponseError),
#[error("{0}")]
@@ -872,10 +882,23 @@ impl InterpreterInfo {
.arg("-c")
.arg(script)
.output()
- .map_err(|err| Error::SpawnFailed {
- path: interpreter.to_path_buf(),
- err,
- })?;
+ .map_err(
+ |err| match err.raw_os_error().and_then(|code| u32::try_from(code).ok()) {
+ // These error codes are returned if the Python interpreter is a corrupt MSIX
+ // package, which we want to differentiate from a typical spawn failure.
+ #[cfg(windows)]
+ Some(APPMODEL_ERROR_NO_PACKAGE | ERROR_CANT_ACCESS_FILE) => {
+ Error::CorruptWindowsPackage {
+ path: interpreter.to_path_buf(),
+ err,
+ }
+ }
+ _ => Error::SpawnFailed {
+ path: interpreter.to_path_buf(),
+ err,
+ },
+ },
+ )?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
From 8d6d0678a71d86020caaf20107b1e81af29f471d Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Tue, 15 Jul 2025 16:47:43 -0500
Subject: [PATCH 040/130] Move "Conflicting dependencies" to the "Resolution"
page (#14633)
---
docs/concepts/projects/config.md | 121 +++++++++++--------------------
docs/concepts/resolution.md | 81 +++++++++++++++++++--
2 files changed, 118 insertions(+), 84 deletions(-)
diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md
index f9d33ed90..8efb667a1 100644
--- a/docs/concepts/projects/config.md
+++ b/docs/concepts/projects/config.md
@@ -196,41 +196,6 @@ To target this environment, you'd export `UV_PROJECT_ENVIRONMENT=/usr/local`.
environment. The `--active` flag can be used to opt-in to respecting `VIRTUAL_ENV`. The
`--no-active` flag can be used to silence the warning.
-## Limited resolution environments
-
-If your project supports a more limited set of platforms or Python versions, you can constrain the
-set of solved platforms via the `environments` setting, which accepts a list of PEP 508 environment
-markers. For example, to constrain the lockfile to macOS and Linux, and exclude Windows:
-
-```toml title="pyproject.toml"
-[tool.uv]
-environments = [
- "sys_platform == 'darwin'",
- "sys_platform == 'linux'",
-]
-```
-
-See the [resolution documentation](../resolution.md#limited-resolution-environments) for more.
-
-## Required environments
-
-If your project _must_ support a specific platform or Python version, you can mark that platform as
-required via the `required-environments` setting. For example, to require that the project supports
-Intel macOS:
-
-```toml title="pyproject.toml"
-[tool.uv]
-required-environments = [
- "sys_platform == 'darwin' and platform_machine == 'x86_64'",
-]
-```
-
-The `required-environments` setting is only relevant for packages that do not publish a source
-distribution (like PyTorch), as such packages can _only_ be installed on environments covered by the
-set of pre-built binary distributions (wheels) published by that package.
-
-See the [resolution documentation](../resolution.md#required-environments) for more.
-
## Build isolation
By default, uv builds all packages in isolated virtual environments, as per
@@ -401,33 +366,12 @@ in the deployed environment without a dependency on the originating source code.
## Conflicting dependencies
-uv requires that all optional dependencies ("extras") declared by the project are compatible with
-each other and resolves all optional dependencies together when creating the lockfile.
+uv resolves all project dependencies together, including optional dependencies ("extras")
+and dependency groups. If dependencies declared in one section are not compatible with those in
+another section, uv will fail to resolve the requirements of the project with an error.
-If optional dependencies declared in one extra are not compatible with those in another extra, uv
-will fail to resolve the requirements of the project with an error.
-
-To work around this, uv supports declaring conflicting extras. For example, consider two sets of
-optional dependencies that conflict with one another:
-
-```toml title="pyproject.toml"
-[project.optional-dependencies]
-extra1 = ["numpy==2.1.2"]
-extra2 = ["numpy==2.0.0"]
-```
-
-If you run `uv lock` with the above dependencies, resolution will fail:
-
-```console
-$ uv lock
- x No solution found when resolving dependencies:
- `-> Because myproject[extra2] depends on numpy==2.0.0 and myproject[extra1] depends on numpy==2.1.2, we can conclude that myproject[extra1] and
- myproject[extra2] are incompatible.
- And because your project requires myproject[extra1] and myproject[extra2], we can conclude that your projects's requirements are unsatisfiable.
-```
-
-But if you specify that `extra1` and `extra2` are conflicting, uv will resolve them separately.
-Specify conflicts in the `tool.uv` section:
+uv supports explicit declaration of conflicting dependency groups. For example, to declare that the
+`optional-dependency` groups `extra1` and `extra2` are incompatible:
```toml title="pyproject.toml"
[tool.uv]
@@ -439,25 +383,9 @@ conflicts = [
]
```
-Now, running `uv lock` will succeed. Note though, that now you cannot install both `extra1` and
-`extra2` at the same time:
-
-```console
-$ uv sync --extra extra1 --extra extra2
-Resolved 3 packages in 14ms
-error: extra `extra1`, extra `extra2` are incompatible with the declared conflicts: {`myproject[extra1]`, `myproject[extra2]`}
-```
-
-This error occurs because installing both `extra1` and `extra2` would result in installing two
-different versions of a package into the same environment.
-
-The above strategy for dealing with conflicting extras also works with dependency groups:
+Or, to declare the development dependency groups `group1` and `group2` incompatible:
```toml title="pyproject.toml"
-[dependency-groups]
-group1 = ["numpy==2.1.2"]
-group2 = ["numpy==2.0.0"]
-
[tool.uv]
conflicts = [
[
@@ -467,4 +395,39 @@ conflicts = [
]
```
-The only difference with conflicting extras is that you need to use `group` instead of `extra`.
+See the [resolution documentation](../resolution.md#conflicting-dependencies) for more.
+
+## Limited resolution environments
+
+If your project supports a more limited set of platforms or Python versions, you can constrain the
+set of solved platforms via the `environments` setting, which accepts a list of PEP 508 environment
+markers. For example, to constrain the lockfile to macOS and Linux, and exclude Windows:
+
+```toml title="pyproject.toml"
+[tool.uv]
+environments = [
+ "sys_platform == 'darwin'",
+ "sys_platform == 'linux'",
+]
+```
+
+See the [resolution documentation](../resolution.md#limited-resolution-environments) for more.
+
+## Required environments
+
+If your project _must_ support a specific platform or Python version, you can mark that platform as
+required via the `required-environments` setting. For example, to require that the project supports
+Intel macOS:
+
+```toml title="pyproject.toml"
+[tool.uv]
+required-environments = [
+ "sys_platform == 'darwin' and platform_machine == 'x86_64'",
+]
+```
+
+The `required-environments` setting is only relevant for packages that do not publish a source
+distribution (like PyTorch), as such packages can _only_ be installed on environments covered by the
+set of pre-built binary distributions (wheels) published by that package.
+
+See the [resolution documentation](../resolution.md#required-environments) for more.
diff --git a/docs/concepts/resolution.md b/docs/concepts/resolution.md
index e857e7b1d..278289ea9 100644
--- a/docs/concepts/resolution.md
+++ b/docs/concepts/resolution.md
@@ -453,6 +453,77 @@ though only `name`, `version`, `requires-dist`, `requires-python`, and `provides
uv. The `version` field is also considered optional. If omitted, the metadata will be used for all
versions of the specified package.
+## Conflicting dependencies
+
+uv requires that all optional dependencies ("extras") declared by the project are compatible with
+each other and resolves all optional dependencies together when creating the lockfile.
+
+If optional dependencies declared in one extra are not compatible with those in another extra, uv
+will fail to resolve the requirements of the project with an error.
+
+To work around this, uv supports declaring conflicting extras. For example, consider two sets of
+optional dependencies that conflict with one another:
+
+```toml title="pyproject.toml"
+[project.optional-dependencies]
+extra1 = ["numpy==2.1.2"]
+extra2 = ["numpy==2.0.0"]
+```
+
+If you run `uv lock` with the above dependencies, resolution will fail:
+
+```console
+$ uv lock
+ x No solution found when resolving dependencies:
+ `-> Because myproject[extra2] depends on numpy==2.0.0 and myproject[extra1] depends on numpy==2.1.2, we can conclude that myproject[extra1] and
+ myproject[extra2] are incompatible.
+ And because your project requires myproject[extra1] and myproject[extra2], we can conclude that your projects's requirements are unsatisfiable.
+```
+
+But if you specify that `extra1` and `extra2` are conflicting, uv will resolve them separately.
+Specify conflicts in the `tool.uv` section:
+
+```toml title="pyproject.toml"
+[tool.uv]
+conflicts = [
+ [
+ { extra = "extra1" },
+ { extra = "extra2" },
+ ],
+]
+```
+
+Now, running `uv lock` will succeed. Note though, that now you cannot install both `extra1` and
+`extra2` at the same time:
+
+```console
+$ uv sync --extra extra1 --extra extra2
+Resolved 3 packages in 14ms
+error: extra `extra1`, extra `extra2` are incompatible with the declared conflicts: {`myproject[extra1]`, `myproject[extra2]`}
+```
+
+This error occurs because installing both `extra1` and `extra2` would result in installing two
+different versions of a package into the same environment.
+
+The above strategy for dealing with conflicting extras also works with dependency groups:
+
+```toml title="pyproject.toml"
+[dependency-groups]
+group1 = ["numpy==2.1.2"]
+group2 = ["numpy==2.0.0"]
+
+[tool.uv]
+conflicts = [
+ [
+ { group = "group1" },
+ { group = "group2" },
+ ],
+]
+```
+
+The only difference from conflicting extras is that you need to use the `group` key instead of
+`extra`.
+
## Lower bounds
By default, `uv add` adds lower bounds to dependencies and, when using uv to manage projects, uv
@@ -513,11 +584,6 @@ reading and extracting archives in the following formats:
- lzma tarball (`.tar.lzma`)
- zip (`.zip`)
-## Learn more
-
-For more details about the internals of the resolver, see the
-[resolver reference](../reference/resolver-internals.md) documentation.
-
## Lockfile versioning
The `uv.lock` file uses a versioned schema. The schema version is included in the `version` field of
@@ -539,3 +605,8 @@ other words, lockfiles may only be rejected across minor releases.
The `revision` field of the lockfile is used to track backwards-compatible changes to the lockfile,
such as adding a new field to distributions. Changes to the revision will not cause older
versions of uv to error.
+
+## Learn more
+
+For more details about the internals of the resolver, see the
+[resolver reference](../reference/resolver-internals.md) documentation.
From 861f7a1c42e366eec0529ef98eeed06665ccba6c Mon Sep 17 00:00:00 2001
From: Gilles Peiffer
Date: Wed, 16 Jul 2025 15:44:29 +0200
Subject: [PATCH 041/130] docs: add missing backtick (#14654)
Subject is message :)
---
docs/pip/packages.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/pip/packages.md b/docs/pip/packages.md
index a47b1aa0e..cdce527b6 100644
--- a/docs/pip/packages.md
+++ b/docs/pip/packages.md
@@ -128,7 +128,7 @@ $ uv pip install --group some/path/pyproject.toml:foo --group other/pyproject.to
!!! note
- As in pip, `--group` flags do not apply to other sources specified with flags like `-r` or -e`.
+ As in pip, `--group` flags do not apply to other sources specified with flags like `-r` or `-e`.
For instance, `uv pip install -r some/path/pyproject.toml --group foo` sources `foo`
from `./pyproject.toml` and **not** `some/path/pyproject.toml`.
From 03de6c36e34032a754fd49ecbd300246954d8042 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Wed, 16 Jul 2025 09:48:16 -0400
Subject: [PATCH 042/130] Warn on invalid `uv.toml` when provided via direct
path (#14653)
## Summary
We validate the `uv.toml` when it's discovered automatically, but not
when provided via `--config-file`. The same limitations exist, though --
I think the lack of enforcement is just an oversight.
Closes https://github.com/astral-sh/uv/issues/14650.
---
crates/uv-settings/src/lib.rs | 7 ++++++-
crates/uv/tests/it/pip_install.rs | 26 +++++++++++++++++++++++++-
2 files changed, 31 insertions(+), 2 deletions(-)
diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs
index 54ae4e261..d676cc060 100644
--- a/crates/uv-settings/src/lib.rs
+++ b/crates/uv-settings/src/lib.rs
@@ -170,7 +170,12 @@ impl FilesystemOptions {
/// Load a [`FilesystemOptions`] from a `uv.toml` file.
pub fn from_file(path: impl AsRef<Path>) -> Result<Self, Error> {
- Ok(Self(read_file(path.as_ref())?))
+ let path = path.as_ref();
+ tracing::debug!("Reading user configuration from: `{}`", path.display());
+
+ let options = read_file(path)?;
+ validate_uv_toml(path, &options)?;
+ Ok(Self(options))
}
}
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index bc27228c7..123d9066b 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -267,7 +267,7 @@ fn invalid_toml_filename() -> Result<()> {
}
#[test]
-fn invalid_uv_toml_option_disallowed() -> Result<()> {
+fn invalid_uv_toml_option_disallowed_automatic_discovery() -> Result<()> {
let context = TestContext::new("3.12");
let uv_toml = context.temp_dir.child("uv.toml");
uv_toml.write_str(indoc! {r"
@@ -288,6 +288,30 @@ fn invalid_uv_toml_option_disallowed() -> Result<()> {
Ok(())
}
+#[test]
+fn invalid_uv_toml_option_disallowed_command_line() -> Result<()> {
+ let context = TestContext::new("3.12");
+ let uv_toml = context.temp_dir.child("foo.toml");
+ uv_toml.write_str(indoc! {r"
+ managed = true
+ "})?;
+
+ uv_snapshot!(context.pip_install()
+ .arg("iniconfig")
+ .arg("--config-file")
+ .arg("foo.toml"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: Failed to parse: `foo.toml`. The `managed` field is not allowed in a `uv.toml` file. `managed` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead.
+ "
+ );
+
+ Ok(())
+}
+
#[test]
fn cache_uv_toml_credentials() -> Result<()> {
let context = TestContext::new("3.12");
From e547527587fc47d3c83b200afa0d6bd15897081a Mon Sep 17 00:00:00 2001
From: Nathan Cain <13713501+nathanscain@users.noreply.github.com>
Date: Wed, 16 Jul 2025 08:52:17 -0500
Subject: [PATCH 043/130] Add UV_LIBC to allow libc selection in multi-libc
environment (#14646)
Closes #14262
## Description
Adds `UV_LIBC` environment variable and implements check within
`Libc::from_env` as recommended here:
https://github.com/astral-sh/uv/issues/14262#issuecomment-3014600313
Gave this a few passes to make sure I follow dev practices within uv as
best I am able. Feel free to call out anything that could be improved.
## Test Plan
Planned to simply run existing test suite. Open to adding more tests
once implementation is validated due to my limited Rust experience.
---
crates/uv-python/src/platform.rs | 42 +++++++++++++++++++++-----------
crates/uv-static/src/env_vars.rs | 4 +++
docs/reference/environment.md | 5 ++++
3 files changed, 37 insertions(+), 14 deletions(-)
diff --git a/crates/uv-python/src/platform.rs b/crates/uv-python/src/platform.rs
index ce8620ae2..606e05e28 100644
--- a/crates/uv-python/src/platform.rs
+++ b/crates/uv-python/src/platform.rs
@@ -5,6 +5,8 @@ use std::ops::Deref;
use std::{fmt, str::FromStr};
use thiserror::Error;
+use uv_static::EnvVars;
+
#[derive(Error, Debug)]
pub enum Error {
#[error("Unknown operating system: {0}")]
@@ -15,6 +17,8 @@ pub enum Error {
UnknownLibc(String),
#[error("Unsupported variant `{0}` for architecture `{1}`")]
UnsupportedVariant(String, String),
+ #[error(transparent)]
+ LibcDetectionError(#[from] LibcDetectionError),
}
/// Architecture variants, e.g., with support for different instruction sets
@@ -95,22 +99,32 @@ pub enum Libc {
}
impl Libc {
- pub(crate) fn from_env() -> Result<Self, LibcDetectionError> {
+ pub(crate) fn from_env() -> Result<Self, Error> {
match std::env::consts::OS {
- "linux" => Ok(Self::Some(match detect_linux_libc()? {
- LibcVersion::Manylinux { .. } => match std::env::consts::ARCH {
- // Checks if the CPU supports hardware floating-point operations.
- // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment.
- // download-metadata.json only includes armv7.
- "arm" | "armv5te" | "armv7" => match detect_hardware_floating_point_support() {
- Ok(true) => target_lexicon::Environment::Gnueabihf,
- Ok(false) => target_lexicon::Environment::Gnueabi,
- Err(_) => target_lexicon::Environment::Gnu,
+ "linux" => {
+ if let Ok(libc) = std::env::var(EnvVars::UV_LIBC) {
+ if !libc.is_empty() {
+ return Self::from_str(&libc);
+ }
+ }
+
+ Ok(Self::Some(match detect_linux_libc()? {
+ LibcVersion::Manylinux { .. } => match std::env::consts::ARCH {
+ // Checks if the CPU supports hardware floating-point operations.
+ // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment.
+ // download-metadata.json only includes armv7.
+ "arm" | "armv5te" | "armv7" => {
+ match detect_hardware_floating_point_support() {
+ Ok(true) => target_lexicon::Environment::Gnueabihf,
+ Ok(false) => target_lexicon::Environment::Gnueabi,
+ Err(_) => target_lexicon::Environment::Gnu,
+ }
+ }
+ _ => target_lexicon::Environment::Gnu,
},
- _ => target_lexicon::Environment::Gnu,
- },
- LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl,
- })),
+ LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl,
+ }))
+ }
"windows" | "macos" => Ok(Self::None),
// Use `None` on platforms without explicit support.
_ => Ok(Self::None),
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index 5b91fccea..ae981cac3 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -154,6 +154,10 @@ impl EnvVars {
/// `--no-python-downloads` option. Whether uv should allow Python downloads.
pub const UV_PYTHON_DOWNLOADS: &'static str = "UV_PYTHON_DOWNLOADS";
+ /// Overrides the environment-determined libc on linux systems when filling in the current platform
+ /// within Python version requests. Options are: `gnu`, `gnueabi`, `gnueabihf`, `musl`, and `none`.
+ pub const UV_LIBC: &'static str = "UV_LIBC";
+
/// Equivalent to the `--compile-bytecode` command-line argument. If set, uv
/// will compile Python source files to bytecode after installation.
pub const UV_COMPILE_BYTECODE: &'static str = "UV_COMPILE_BYTECODE";
diff --git a/docs/reference/environment.md b/docs/reference/environment.md
index bf8bf29ec..47e4d8db9 100644
--- a/docs/reference/environment.md
+++ b/docs/reference/environment.md
@@ -167,6 +167,11 @@ Defaults to `~/.local/bin`.
Equivalent to the `--keyring-provider` command-line argument. If set, uv
will use this value as the keyring provider.
+### `UV_LIBC`
+
+Overrides the environment-determined libc on linux systems when filling in the current platform
+within Python version requests. Options are: `gnu`, `gnueabi`, `gnueabihf`, `musl`, and `none`.
+
### `UV_LINK_MODE`
Equivalent to the `--link-mode` command-line argument. If set, uv will use this as
From 0cf5ecf8413c54d7607acdf67cc41f8285f291ed Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 09:04:58 -0500
Subject: [PATCH 044/130] Request arm64 Python in aarch64-windows smoke test
(#14655)
The Python interpreter selected by `py` recently changed to x64 instead
of arm64.
Closes https://github.com/astral-sh/uv/pull/14652
See https://github.com/astral-sh/uv/pull/14652
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0ccc9ea4e..bb357f4a3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2261,7 +2261,7 @@ jobs:
name: uv-windows-aarch64-${{ github.sha }}
- name: "Validate global Python install"
- run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe
+ run: py -3.13-arm64 ./scripts/check_system_python.py --uv ./uv.exe
# Test our PEP 514 integration that installs Python into the Windows registry.
system-test-windows-registry:
From 1b2f212e8b2f91069b858cb7f5905589c9d15add Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 09:05:10 -0500
Subject: [PATCH 045/130] Use `[PYTHON]` placeholder in filtered Python names
(#14640)
We should never replace with a non-placeholder; it is very confusing
when trying to understand test behavior.
---
crates/uv/tests/it/common/mod.rs | 15 ++++-
crates/uv/tests/it/pip_sync.rs | 50 ++++++++---------
crates/uv/tests/it/python_find.rs | 82 ++++++++++++++--------------
crates/uv/tests/it/python_install.rs | 12 ++--
crates/uv/tests/it/python_list.rs | 4 +-
crates/uv/tests/it/run.rs | 40 +++++++-------
crates/uv/tests/it/sync.rs | 28 ++++------
crates/uv/tests/it/tool_list.rs | 6 +-
8 files changed, 121 insertions(+), 116 deletions(-)
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index 2dc72fa1d..d4a73f953 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -210,12 +210,14 @@ impl TestContext {
pub fn with_filtered_python_names(mut self) -> Self {
if cfg!(windows) {
self.filters
- .push((r"python\.exe".to_string(), "python".to_string()));
+ .push((r"python\.exe".to_string(), "[PYTHON]".to_string()));
} else {
self.filters
- .push((r"python\d.\d\d".to_string(), "python".to_string()));
+ .push((r"python\d.\d\d".to_string(), "[PYTHON]".to_string()));
self.filters
- .push((r"python\d".to_string(), "python".to_string()));
+ .push((r"python\d".to_string(), "[PYTHON]".to_string()));
+ self.filters
+ .push((r"/python".to_string(), "/[PYTHON]".to_string()));
}
self
}
@@ -224,6 +226,13 @@ impl TestContext {
/// `Scripts` on Windows and `bin` on Unix.
#[must_use]
pub fn with_filtered_virtualenv_bin(mut self) -> Self {
+ self.filters.push((
+ format!(
+ r"[\\/]{}[\\/]",
+ venv_bin_path(PathBuf::new()).to_string_lossy()
+ ),
+ "/[BIN]/".to_string(),
+ ));
self.filters.push((
format!(r"[\\/]{}", venv_bin_path(PathBuf::new()).to_string_lossy()),
"/[BIN]".to_string(),
diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs
index 43cbc26c7..537c5dff2 100644
--- a/crates/uv/tests/it/pip_sync.rs
+++ b/crates/uv/tests/it/pip_sync.rs
@@ -43,15 +43,15 @@ fn missing_venv() -> Result<()> {
requirements.write_str("anyio")?;
fs::remove_dir_all(&context.venv)?;
- uv_snapshot!(context.filters(), context.pip_sync().arg("requirements.txt"), @r###"
+ uv_snapshot!(context.filters(), context.pip_sync().arg("requirements.txt"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
- error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/python`
- Caused by: Python interpreter not found at `[VENV]/[BIN]/python`
- "###);
+ error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/[PYTHON]`
+ Caused by: Python interpreter not found at `[VENV]/[BIN]/[PYTHON]`
+ ");
assert!(predicates::path::missing().eval(&context.venv));
@@ -5191,18 +5191,18 @@ fn target_built_distribution() -> Result<()> {
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in")
.arg("--target")
- .arg("target"), @r###"
+ .arg("target"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Ensure that the package is present in the target directory.
assert!(context.temp_dir.child("target").child("iniconfig").is_dir());
@@ -5227,20 +5227,20 @@ fn target_built_distribution() -> Result<()> {
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in")
.arg("--target")
- .arg("target"), @r###"
+ .arg("target"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
- iniconfig==2.0.0
+ iniconfig==1.1.1
- "###);
+ ");
// Remove it, and replace with `flask`, which includes a binary.
let requirements_in = context.temp_dir.child("requirements.in");
@@ -5249,20 +5249,20 @@ fn target_built_distribution() -> Result<()> {
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in")
.arg("--target")
- .arg("target"), @r###"
+ .arg("target"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
+ flask==3.0.2
- iniconfig==1.1.1
- "###);
+ ");
// Ensure that the binary is present in the target directory.
assert!(
context
@@ -5293,18 +5293,18 @@ fn target_source_distribution() -> Result<()> {
.arg("--no-binary")
.arg("iniconfig")
.arg("--target")
- .arg("target"), @r###"
+ .arg("target"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Ensure that the build requirements are not present in the target directory.
assert!(!context.temp_dir.child("target").child("hatchling").is_dir());
@@ -5364,18 +5364,18 @@ fn target_no_build_isolation() -> Result<()> {
.arg("--no-binary")
.arg("wheel")
.arg("--target")
- .arg("target"), @r###"
+ .arg("target"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ wheel==0.43.0
- "###);
+ ");
// Ensure that the build requirements are not present in the target directory.
assert!(!context.temp_dir.child("target").child("flit_core").is_dir());
@@ -5447,18 +5447,18 @@ fn prefix() -> Result<()> {
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in")
.arg("--prefix")
- .arg(prefix.path()), @r###"
+ .arg(prefix.path()), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
- "###);
+ ");
// Ensure that we can't import the package.
context.assert_command("import iniconfig").failure();
@@ -5483,20 +5483,20 @@ fn prefix() -> Result<()> {
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in")
.arg("--prefix")
- .arg(prefix.path()), @r###"
+ .arg(prefix.path()), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python
+ Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
- iniconfig==2.0.0
+ iniconfig==1.1.1
- "###);
+ ");
Ok(())
}
diff --git a/crates/uv/tests/it/python_find.rs b/crates/uv/tests/it/python_find.rs
index b8b42d61b..49e60c068 100644
--- a/crates/uv/tests/it/python_find.rs
+++ b/crates/uv/tests/it/python_find.rs
@@ -425,25 +425,25 @@ fn python_find_venv() {
// is super annoying and requires some changes to how we represent working directories in the
// test context to resolve.
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find(), @r###"
+ uv_snapshot!(context.filters(), context.python_find(), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// Even if the `VIRTUAL_ENV` is not set (the test context includes this by default)
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().env_remove(EnvVars::VIRTUAL_ENV), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env_remove(EnvVars::VIRTUAL_ENV), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
let child_dir = context.temp_dir.child("child");
child_dir.create_dir_all().unwrap();
@@ -485,14 +485,14 @@ fn python_find_venv() {
// We should find virtual environments from a child directory
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r###"
+ uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// A virtual environment in the child directory takes precedence over the parent
uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11").arg("-q").current_dir(&child_dir), @r###"
@@ -504,14 +504,14 @@ fn python_find_venv() {
"###);
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r###"
+ uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r"
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/child/.venv/[BIN]/python
+ [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// But if we delete the parent virtual environment
fs_err::remove_dir_all(context.temp_dir.child(".venv")).unwrap();
@@ -528,36 +528,36 @@ fn python_find_venv() {
// Unless, it is requested by path
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().arg("child/.venv"), @r###"
+ uv_snapshot!(context.filters(), context.python_find().arg("child/.venv"), @r"
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/child/.venv/[BIN]/python
+ [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// Or activated via `VIRTUAL_ENV`
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, child_dir.join(".venv").as_os_str()), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, child_dir.join(".venv").as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/child/.venv/[BIN]/python
+ [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// Or at the front of the PATH
#[cfg(not(windows))]
- uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, child_dir.join(".venv").join("bin").as_os_str()), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, child_dir.join(".venv").join("bin").as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/child/.venv/[BIN]/python
+ [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// This holds even if there are other directories before it in the path, as long as they do
// not contain a Python executable
@@ -569,14 +569,14 @@ fn python_find_venv() {
])
.unwrap();
- uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/child/.venv/[BIN]/python
+ [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
}
// But, if there's an executable _before_ the virtual environment — we prefer that
@@ -678,33 +678,32 @@ fn python_find_unsupported_version() {
#[test]
fn python_find_venv_invalid() {
let context: TestContext = TestContext::new("3.12")
- // Enable additional filters for Windows compatibility
- .with_filtered_exe_suffix()
.with_filtered_python_names()
- .with_filtered_virtualenv_bin();
+ .with_filtered_virtualenv_bin()
+ .with_filtered_exe_suffix();
// We find the virtual environment
- uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// If the binaries are missing from a virtual environment, we fail
fs_err::remove_dir_all(venv_bin_path(&context.venv)).unwrap();
- uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r###"
+ uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
- error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/python`
- Caused by: Python interpreter not found at `[VENV]/[BIN]/python`
- "###);
+ error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/[PYTHON]`
+ Caused by: Python interpreter not found at `[VENV]/[BIN]/[PYTHON]`
+ ");
// Unless the virtual environment is not active
uv_snapshot!(context.filters(), context.python_find(), @r###"
@@ -783,9 +782,10 @@ fn python_required_python_major_minor() {
#[test]
fn python_find_script() {
let context = TestContext::new("3.13")
- .with_filtered_exe_suffix()
.with_filtered_virtualenv_bin()
- .with_filtered_python_names();
+ .with_filtered_python_names()
+ .with_filtered_exe_suffix();
+
let filters = context
.filters()
.into_iter()
@@ -819,7 +819,7 @@ fn python_find_script() {
success: true
exit_code: 0
----- stdout -----
- [CACHE_DIR]/environments-v2/[HASHEDNAME]/[BIN]/python
+ [CACHE_DIR]/environments-v2/[HASHEDNAME]/[BIN]/[PYTHON]
----- stderr -----
");
@@ -828,9 +828,9 @@ fn python_find_script() {
#[test]
fn python_find_script_no_environment() {
let context = TestContext::new("3.13")
- .with_filtered_exe_suffix()
.with_filtered_virtualenv_bin()
- .with_filtered_python_names();
+ .with_filtered_python_names()
+ .with_filtered_exe_suffix();
let script = context.temp_dir.child("foo.py");
@@ -846,7 +846,7 @@ fn python_find_script_no_environment() {
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
");
@@ -881,9 +881,9 @@ fn python_find_script_python_not_found() {
#[test]
fn python_find_script_no_such_version() {
let context = TestContext::new("3.13")
- .with_filtered_exe_suffix()
.with_filtered_virtualenv_bin()
.with_filtered_python_names()
+ .with_filtered_exe_suffix()
.with_filtered_python_sources();
let filters = context
.filters()
diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs
index 0cb952054..62b3254b8 100644
--- a/crates/uv/tests/it/python_install.rs
+++ b/crates/uv/tests/it/python_install.rs
@@ -1499,10 +1499,10 @@ fn python_install_patch_dylib() {
fn python_install_314() {
let context: TestContext = TestContext::new_with_versions(&[])
.with_filtered_python_keys()
- .with_filtered_exe_suffix()
.with_managed_python_dirs()
+ .with_filtered_python_install_bin()
.with_filtered_python_names()
- .with_filtered_python_install_bin();
+ .with_filtered_exe_suffix();
// Install 3.14
// For now, this provides test coverage of pre-release handling
@@ -1533,7 +1533,7 @@ fn python_install_314() {
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
+ [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
----- stderr -----
");
@@ -1543,7 +1543,7 @@ fn python_install_314() {
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
+ [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
----- stderr -----
");
@@ -1552,7 +1552,7 @@ fn python_install_314() {
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
+ [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
----- stderr -----
");
@@ -1572,7 +1572,7 @@ fn python_install_314() {
success: true
exit_code: 0
----- stdout -----
- [TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/[INSTALL-BIN]/python
+ [TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
----- stderr -----
");
diff --git a/crates/uv/tests/it/python_list.rs b/crates/uv/tests/it/python_list.rs
index 959ebdd80..11472baec 100644
--- a/crates/uv/tests/it/python_list.rs
+++ b/crates/uv/tests/it/python_list.rs
@@ -411,8 +411,8 @@ fn python_list_downloads_installed() {
let context: TestContext = TestContext::new_with_versions(&[])
.with_filtered_python_keys()
- .with_filtered_python_names()
.with_filtered_python_install_bin()
+ .with_filtered_python_names()
.with_managed_python_dirs();
// We do not test showing all interpreters — as it differs per platform
@@ -450,7 +450,7 @@ fn python_list_downloads_installed() {
success: true
exit_code: 0
----- stdout -----
- cpython-3.10.18-[PLATFORM] managed/cpython-3.10.18-[PLATFORM]/[INSTALL-BIN]/python
+ cpython-3.10.18-[PLATFORM] managed/cpython-3.10.18-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
pypy-3.10.16-[PLATFORM]
graalpy-3.10.0-[PLATFORM]
diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs
index 98c2adbfe..93420cca0 100644
--- a/crates/uv/tests/it/run.rs
+++ b/crates/uv/tests/it/run.rs
@@ -2851,11 +2851,11 @@ fn run_no_project() -> Result<()> {
init.touch()?;
// `run` should run in the context of the project.
- uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
Resolved 6 packages in [TIME]
@@ -2865,50 +2865,50 @@ fn run_no_project() -> Result<()> {
+ foo==1.0.0 (from file://[TEMP_DIR]/)
+ idna==3.6
+ sniffio==1.3.1
- "###);
+ ");
// `run --no-project` should not (but it should still run in the same environment, as it would
// if there were no project at all).
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// `run --no-project --isolated` should run in an entirely isolated environment.
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--isolated").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--isolated").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [CACHE_DIR]/builds-v0/[TMP]/python
+ [CACHE_DIR]/builds-v0/[TMP]/[PYTHON]
----- stderr -----
- "###);
+ ");
// `run --no-project` should not (but it should still run in the same environment, as it would
// if there were no project at all).
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
// `run --no-project --locked` should fail.
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--locked").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--locked").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
warning: `--locked` has no effect when used alongside `--no-project`
- "###);
+ ");
Ok(())
}
@@ -3092,14 +3092,14 @@ fn run_project_toml_error() -> Result<()> {
"###);
// `run --no-project` should not
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r"
success: true
exit_code: 0
----- stdout -----
- [VENV]/[BIN]/python
+ [VENV]/[BIN]/[PYTHON]
----- stderr -----
- "###);
+ ");
Ok(())
}
@@ -3691,7 +3691,7 @@ fn run_linked_environment_path() -> Result<()> {
exit_code: 0
----- stdout -----
[TEMP_DIR]/target
- [TEMP_DIR]/target/[BIN]/python
+ [TEMP_DIR]/target/[BIN]/[PYTHON]
----- stderr -----
Resolved 8 packages in [TIME]
@@ -3705,7 +3705,7 @@ fn run_linked_environment_path() -> Result<()> {
}, {
assert_snapshot!(
black_entrypoint, @r##"
- #![TEMP_DIR]/target/[BIN]/python
+ #![TEMP_DIR]/target/[BIN]/[PYTHON]
# -*- coding: utf-8 -*-
import sys
from black import patched_main
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 7063035f9..9fecd50b0 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -306,7 +306,7 @@ fn sync_json() -> Result<()> {
"environment": {
"path": "[VENV]/",
"python": {
- "path": "[VENV]/[BIN]/python",
+ "path": "[VENV]/[BIN]/[PYTHON]",
"version": "3.12.[X]",
"implementation": "cpython"
}
@@ -350,7 +350,7 @@ fn sync_json() -> Result<()> {
"environment": {
"path": "[VENV]/",
"python": {
- "path": "[VENV]/[BIN]/python",
+ "path": "[VENV]/[BIN]/[PYTHON]",
"version": "3.12.[X]",
"implementation": "cpython"
}
@@ -389,7 +389,7 @@ fn sync_json() -> Result<()> {
"environment": {
"path": "[VENV]/",
"python": {
- "path": "[VENV]/[BIN]/python",
+ "path": "[VENV]/[BIN]/[PYTHON]",
"version": "3.12.[X]",
"implementation": "cpython"
}
@@ -475,7 +475,7 @@ fn sync_dry_json() -> Result<()> {
"environment": {
"path": "[VENV]/",
"python": {
- "path": "[VENV]/[BIN]/python",
+ "path": "[VENV]/[BIN]/[PYTHON]",
"version": "3.12.[X]",
"implementation": "cpython"
}
@@ -4884,14 +4884,10 @@ fn sync_active_script_environment_json() -> Result<()> {
let filters = context
.filters()
.into_iter()
- .chain(vec![
- (
- r"environments-v2/script-[a-z0-9]+",
- "environments-v2/script-[HASH]",
- ),
- ("bin/python3", "[PYTHON]"),
- ("Scripts/python.exe", "[PYTHON]"),
- ])
+ .chain(vec![(
+ r"environments-v2/script-[a-z0-9]+",
+ "environments-v2/script-[HASH]",
+ )])
.collect::<Vec<_>>();
// Running `uv sync --script` with `VIRTUAL_ENV` should warn
@@ -4914,7 +4910,7 @@ fn sync_active_script_environment_json() -> Result<()> {
"environment": {
"path": "[CACHE_DIR]/environments-v2/script-[HASH]",
"python": {
- "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/python",
+ "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/[PYTHON]",
"version": "3.11.[X]",
"implementation": "cpython"
}
@@ -4960,7 +4956,7 @@ fn sync_active_script_environment_json() -> Result<()> {
"environment": {
"path": "[TEMP_DIR]/foo",
"python": {
- "path": "[TEMP_DIR]/foo/[BIN]/python",
+ "path": "[TEMP_DIR]/foo/[BIN]/[PYTHON]",
"version": "3.11.[X]",
"implementation": "cpython"
}
@@ -5019,7 +5015,7 @@ fn sync_active_script_environment_json() -> Result<()> {
"environment": {
"path": "[TEMP_DIR]/foo",
"python": {
- "path": "[TEMP_DIR]/foo/[BIN]/python",
+ "path": "[TEMP_DIR]/foo/[BIN]/[PYTHON]",
"version": "3.12.[X]",
"implementation": "cpython"
}
@@ -6558,7 +6554,7 @@ fn sync_invalid_environment() -> Result<()> {
----- stdout -----
----- stderr -----
- warning: Ignoring existing virtual environment linked to non-existent Python interpreter: .venv/[BIN]/python -> python
+ warning: Ignoring existing virtual environment linked to non-existent Python interpreter: .venv/[BIN]/[PYTHON] -> python
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Removed virtual environment at: .venv
Creating virtual environment at: .venv
diff --git a/crates/uv/tests/it/tool_list.rs b/crates/uv/tests/it/tool_list.rs
index 93dd5756e..9268118ca 100644
--- a/crates/uv/tests/it/tool_list.rs
+++ b/crates/uv/tests/it/tool_list.rs
@@ -180,7 +180,7 @@ fn tool_list_bad_environment() -> Result<()> {
.tool_list()
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()),
- @r###"
+ @r"
success: true
exit_code: 0
----- stdout -----
@@ -188,8 +188,8 @@ fn tool_list_bad_environment() -> Result<()> {
- ruff
----- stderr -----
- warning: Invalid environment at `tools/black`: missing Python executable at `tools/black/[BIN]/python` (run `uv tool install black --reinstall` to reinstall)
- "###
+ warning: Invalid environment at `tools/black`: missing Python executable at `tools/black/[BIN]/[PYTHON]` (run `uv tool install black --reinstall` to reinstall)
+ "
);
Ok(())
From eaff96e5dce946ec91ee632025f4dbd8ae67f173 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 16 Jul 2025 09:06:06 -0500
Subject: [PATCH 046/130] Sync latest Python releases (#14643)
Automated update for Python releases.
Co-authored-by: zanieb <2586601+zanieb@users.noreply.github.com>
---
crates/uv-python/download-metadata.json | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json
index 8c7ffec4c..540a3c8a0 100644
--- a/crates/uv-python/download-metadata.json
+++ b/crates/uv-python/download-metadata.json
@@ -35771,8 +35771,8 @@
"minor": 11,
"patch": 0,
"prerelease": "",
- "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-macos-aarch64.tar.gz",
- "sha256": "61e11d5176d5bb709b919979ef3525f4db1e39c404b59aa54d887f56bf8fab44",
+ "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-macos-aarch64.tar.gz",
+ "sha256": "f4a2ae01bae0fa53ec0d19f86d73c6dcc2a162d245552030183b84bfdd8f7635",
"variant": null
},
"graalpy-3.11.0-darwin-x86_64-none": {
@@ -35787,8 +35787,8 @@
"minor": 11,
"patch": 0,
"prerelease": "",
- "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-macos-amd64.tar.gz",
- "sha256": "4bc42b36117c9ab09c4f411ec5a7a85ed58521dd20b529d971bb0ed3d0b7c363",
+ "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-macos-amd64.tar.gz",
+ "sha256": "2f4d5e7dbdf90e38778dfcb8ca3e1ec7eee257ef726b1937d5bc91b54cdddf9b",
"variant": null
},
"graalpy-3.11.0-linux-aarch64-gnu": {
@@ -35803,8 +35803,8 @@
"minor": 11,
"patch": 0,
"prerelease": "",
- "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-linux-aarch64.tar.gz",
- "sha256": "2a80800a76ee6b737d6458ba9ab30ce386dfdd5b2b2bec3ee6bc51fd8e51e7c2",
+ "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-linux-aarch64.tar.gz",
+ "sha256": "c9be459ab9479892b88dd63f8f88cbc7b1067f4cb27ff17f4761b36de6bd73af",
"variant": null
},
"graalpy-3.11.0-linux-x86_64-gnu": {
@@ -35819,8 +35819,8 @@
"minor": 11,
"patch": 0,
"prerelease": "",
- "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-linux-amd64.tar.gz",
- "sha256": "55872af24819cb99efa2338db057aeda0c8f9dd412a4a6f5ea19b256ee82fd9e",
+ "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-linux-amd64.tar.gz",
+ "sha256": "604b7abf6c58038a30866e52da43818af63bcd97909af8b1a96523c7f0e01414",
"variant": null
},
"graalpy-3.11.0-windows-x86_64-none": {
@@ -35835,8 +35835,8 @@
"minor": 11,
"patch": 0,
"prerelease": "",
- "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-windows-amd64.zip",
- "sha256": "bad923fb64fa2fc71bb424818aac8dcfe0cc9554abef5235d7c08e597ed778ae",
+ "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-windows-amd64.zip",
+ "sha256": "9606134284d4d95b2f9d69c3087cd3e9e488f46355b419f5e66588a3281df6a3",
"variant": null
},
"graalpy-3.10.0-darwin-aarch64-none": {
From 1f49fbd53cdef47582cfdd45f31f077fbf75210d Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 09:17:01 -0500
Subject: [PATCH 047/130] Display `sys.executable` names in check system jobs
(#14656)
Cherry-picked from https://github.com/astral-sh/uv/pull/14652
This is useful for debugging
---
scripts/check_system_python.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/scripts/check_system_python.py b/scripts/check_system_python.py
index 565518e50..fbfc5557e 100755
--- a/scripts/check_system_python.py
+++ b/scripts/check_system_python.py
@@ -24,7 +24,7 @@ def install_package(*, uv: str, package: str):
check=True,
)
- logging.info(f"Checking that `{package}` can be imported.")
+ logging.info(f"Checking that `{package}` can be imported with `{sys.executable}`.")
code = subprocess.run(
[sys.executable, "-c", f"import {package}"],
cwd=temp_dir,
@@ -82,7 +82,9 @@ if __name__ == "__main__":
)
# Ensure that the package (`pylint`) is installed.
- logging.info("Checking that `pylint` is installed.")
+ logging.info(
+ f"Checking that `pylint` is installed with `{sys.executable} -m pip`."
+ )
code = subprocess.run(
[sys.executable, "-m", "pip", "show", "pylint"],
cwd=temp_dir,
From 8b29ec0bfd4d34141bfcfb8f5424cbc737dba0b8 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 09:20:25 -0500
Subject: [PATCH 048/130] Use `astral.sh` instead of `example.com` in
`lock_unique_named_index` (#14657)
This test flakes a lot, maybe using a different domain will help
Closes https://github.com/astral-sh/uv/issues/14542
---
crates/uv/tests/it/lock.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index d5757b6ef..f91870762 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -16412,7 +16412,7 @@ fn lock_unique_named_index() -> Result<()> {
[[tool.uv.index]]
name = "example"
- url = "https://example.com"
+ url = "https://astral.sh"
"#,
)?;
From 7fece9b90a07ab8479ba9445f90cedc3b08f61a8 Mon Sep 17 00:00:00 2001
From: konsti
Date: Wed, 16 Jul 2025 15:21:22 +0100
Subject: [PATCH 049/130] Remove marker from `Edge` (#14649)
It seems that this field is unused.
---
crates/uv-distribution-types/src/dist_error.rs | 4 ++--
crates/uv-distribution-types/src/resolution.rs | 9 ++++-----
crates/uv-resolver/src/lock/export/pylock_toml.rs | 2 +-
crates/uv-resolver/src/lock/installable.rs | 13 ++++++-------
crates/uv-resolver/src/resolution/output.rs | 13 ++++---------
5 files changed, 17 insertions(+), 24 deletions(-)
diff --git a/crates/uv-distribution-types/src/dist_error.rs b/crates/uv-distribution-types/src/dist_error.rs
index a452ce663..d2cfee16d 100644
--- a/crates/uv-distribution-types/src/dist_error.rs
+++ b/crates/uv-distribution-types/src/dist_error.rs
@@ -131,11 +131,11 @@ impl DerivationChain {
));
let target = edge.source();
let extra = match edge.weight() {
- Edge::Optional(extra, ..) => Some(extra.clone()),
+ Edge::Optional(extra) => Some(extra.clone()),
_ => None,
};
let group = match edge.weight() {
- Edge::Dev(group, ..) => Some(group.clone()),
+ Edge::Dev(group) => Some(group.clone()),
_ => None,
};
queue.push_back((target, extra, group, path));
diff --git a/crates/uv-distribution-types/src/resolution.rs b/crates/uv-distribution-types/src/resolution.rs
index 5ff34adf5..e690b8693 100644
--- a/crates/uv-distribution-types/src/resolution.rs
+++ b/crates/uv-distribution-types/src/resolution.rs
@@ -1,6 +1,5 @@
use uv_distribution_filename::DistExtension;
use uv_normalize::{ExtraName, GroupName, PackageName};
-use uv_pep508::MarkerTree;
use uv_pypi_types::{HashDigest, HashDigests};
use crate::{
@@ -202,12 +201,12 @@ impl Node {
}
}
-/// An edge in the resolution graph, along with the marker that must be satisfied to traverse it.
+/// An edge in the resolution graph.
#[derive(Debug, Clone)]
pub enum Edge {
- Prod(MarkerTree),
- Optional(ExtraName, MarkerTree),
- Dev(GroupName, MarkerTree),
+ Prod,
+ Optional(ExtraName),
+ Dev(GroupName),
}
impl From<&ResolvedDist> for RequirementSource {
diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs
index d2c2383a5..8a53fd8f7 100644
--- a/crates/uv-resolver/src/lock/export/pylock_toml.rs
+++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs
@@ -1152,7 +1152,7 @@ impl<'lock> PylockToml {
};
let index = graph.add_node(dist);
- graph.add_edge(root, index, Edge::Prod(package.marker));
+ graph.add_edge(root, index, Edge::Prod);
}
Ok(Resolution::new(graph))
diff --git a/crates/uv-resolver/src/lock/installable.rs b/crates/uv-resolver/src/lock/installable.rs
index e3cdbf019..4851306da 100644
--- a/crates/uv-resolver/src/lock/installable.rs
+++ b/crates/uv-resolver/src/lock/installable.rs
@@ -13,7 +13,6 @@ use uv_configuration::ExtrasSpecificationWithDefaults;
use uv_configuration::{BuildOptions, DependencyGroupsWithDefaults, InstallOptions};
use uv_distribution_types::{Edge, Node, Resolution, ResolvedDist};
use uv_normalize::{ExtraName, GroupName, PackageName};
-use uv_pep508::MarkerTree;
use uv_platform_tags::Tags;
use uv_pypi_types::ResolverMarkerEnvironment;
@@ -113,7 +112,7 @@ pub trait Installable<'lock> {
inverse.insert(&dist.id, index);
// Add an edge from the root.
- petgraph.add_edge(root, index, Edge::Prod(MarkerTree::TRUE));
+ petgraph.add_edge(root, index, Edge::Prod);
// Push the package onto the queue.
roots.push((dist, index));
@@ -189,7 +188,7 @@ pub trait Installable<'lock> {
// a specific marker environment and set of extras/groups.
// So at this point, we know the extras/groups have been
// satisfied, so we can safely drop the conflict marker.
- Edge::Dev(group.clone(), dep.complexified_marker.pep508()),
+ Edge::Dev(group.clone()),
);
// Push its dependencies on the queue.
@@ -231,7 +230,7 @@ pub trait Installable<'lock> {
inverse.insert(&dist.id, index);
// Add the edge.
- petgraph.add_edge(root, index, Edge::Prod(dependency.marker));
+ petgraph.add_edge(root, index, Edge::Prod);
// Push its dependencies on the queue.
if seen.insert((&dist.id, None)) {
@@ -300,7 +299,7 @@ pub trait Installable<'lock> {
};
// Add the edge.
- petgraph.add_edge(root, index, Edge::Dev(group.clone(), dependency.marker));
+ petgraph.add_edge(root, index, Edge::Dev(group.clone()));
// Push its dependencies on the queue.
if seen.insert((&dist.id, None)) {
@@ -484,9 +483,9 @@ pub trait Installable<'lock> {
index,
dep_index,
if let Some(extra) = extra {
- Edge::Optional(extra.clone(), dep.complexified_marker.pep508())
+ Edge::Optional(extra.clone())
} else {
- Edge::Prod(dep.complexified_marker.pep508())
+ Edge::Prod
},
);
diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs
index 928b9c605..dd2b3388f 100644
--- a/crates/uv-resolver/src/resolution/output.rs
+++ b/crates/uv-resolver/src/resolution/output.rs
@@ -894,16 +894,11 @@ impl From for uv_distribution_types::Resolution {
// Re-add the edges to the reduced graph.
for edge in graph.edge_indices() {
let (source, target) = graph.edge_endpoints(edge).unwrap();
- // OK to ignore conflicting marker because we've asserted
- // above that we aren't in universal mode. If we aren't in
- // universal mode, then there can be no conflicts since
- // conflicts imply forks and forks imply universal mode.
- let marker = graph[edge].pep508();
match (&graph[source], &graph[target]) {
(ResolutionGraphNode::Root, ResolutionGraphNode::Dist(target_dist)) => {
let target = inverse[&target_dist.name()];
- transformed.update_edge(root, target, Edge::Prod(marker));
+ transformed.update_edge(root, target, Edge::Prod);
}
(
ResolutionGraphNode::Dist(source_dist),
@@ -913,11 +908,11 @@ impl From for uv_distribution_types::Resolution {
let target = inverse[&target_dist.name()];
let edge = if let Some(extra) = source_dist.extra.as_ref() {
- Edge::Optional(extra.clone(), marker)
+ Edge::Optional(extra.clone())
} else if let Some(dev) = source_dist.dev.as_ref() {
- Edge::Dev(dev.clone(), marker)
+ Edge::Dev(dev.clone())
} else {
- Edge::Prod(marker)
+ Edge::Prod
};
transformed.add_edge(source, target, edge);
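For orientation, the sketch below (not part of the patch) shows the kind of edge filtering this simplification enables: with the markers gone, consumers match directly on the variant. The `Edge` mirror, the petgraph wiring, and the "skip dev groups" policy are illustrative assumptions, not uv's actual API; petgraph is assumed available, as it already is elsewhere in these diffs.

```rust
// Minimal sketch: walking a resolution graph whose edges no longer carry markers.
// `Edge` mirrors the simplified enum above; the petgraph usage and the filtering
// policy are assumptions for illustration only.
use petgraph::graph::DiGraph;

#[derive(Debug, Clone)]
enum Edge {
    Prod,
    Optional(String), // extra name
    Dev(String),      // dependency-group name
}

/// Collect the packages reachable through production or extra edges,
/// skipping dependency-group (dev) edges.
fn runtime_targets(graph: &DiGraph<&'static str, Edge>) -> Vec<&'static str> {
    graph
        .edge_indices()
        .filter(|ix| matches!(graph[*ix], Edge::Prod | Edge::Optional(_)))
        .filter_map(|ix| graph.edge_endpoints(ix))
        .map(|(_, target)| graph[target])
        .collect()
}

fn main() {
    let mut graph = DiGraph::new();
    let root = graph.add_node("root");
    let black = graph.add_node("black");
    let pytest = graph.add_node("pytest");
    graph.add_edge(root, black, Edge::Prod);
    graph.add_edge(root, pytest, Edge::Dev("dev".to_string()));
    // Only `black` survives: dev-group edges are filtered out.
    println!("{:?}", runtime_targets(&graph));
}
```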
From 052a74c45110d5ddad9943ff33c2c3c332b96e0f Mon Sep 17 00:00:00 2001
From: konsti
Date: Wed, 16 Jul 2025 15:56:32 +0100
Subject: [PATCH 050/130] Fix doctests (#14658)
`cargo nextest run` doesn't run them, but `cargo insta test
--test-runner nextest` does, which surfaced those failures.
---
crates/uv-pep508/src/lib.rs | 2 +-
crates/uv-requirements-txt/src/shquote.rs | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/crates/uv-pep508/src/lib.rs b/crates/uv-pep508/src/lib.rs
index e2945743b..f63d46206 100644
--- a/crates/uv-pep508/src/lib.rs
+++ b/crates/uv-pep508/src/lib.rs
@@ -11,7 +11,7 @@
//! let marker = r#"requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8""#;
//! let dependency_specification = Requirement::::from_str(marker).unwrap();
//! assert_eq!(dependency_specification.name.as_ref(), "requests");
-//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()]);
+//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()].into());
//! ```
#![warn(missing_docs)]
diff --git a/crates/uv-requirements-txt/src/shquote.rs b/crates/uv-requirements-txt/src/shquote.rs
index d30b4bc5b..180a62496 100644
--- a/crates/uv-requirements-txt/src/shquote.rs
+++ b/crates/uv-requirements-txt/src/shquote.rs
@@ -146,8 +146,8 @@ fn unquote_open_escape(acc: &mut String, cursor: &mut std::iter::Enumerate Result, UnquoteError> {
// If the string does not contain any single-quotes, double-quotes, or escape sequences, it
From 7cdc1f62ee9e1dbf6b9cbb9967c3e7e75813bd99 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Wed, 16 Jul 2025 12:02:29 -0400
Subject: [PATCH 051/130] Suggest `uv cache clean` prior to `--reinstall`
(#14659)
## Summary
Closes https://github.com/astral-sh/uv/issues/14479.
---
docs/concepts/cache.md | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/docs/concepts/cache.md b/docs/concepts/cache.md
index 6610ccb55..189465ac4 100644
--- a/docs/concepts/cache.md
+++ b/docs/concepts/cache.md
@@ -19,12 +19,17 @@ The specifics of uv's caching semantics vary based on the nature of the dependen
If you're running into caching issues, uv includes a few escape hatches:
+- To clear the cache entirely, run `uv cache clean`. To clear the cache for a specific package, run
+ `uv cache clean <package>`. For example, `uv cache clean ruff` will clear the cache for the
+ `ruff` package.
- To force uv to revalidate cached data for all dependencies, pass `--refresh` to any command (e.g.,
`uv sync --refresh` or `uv pip install --refresh ...`).
- To force uv to revalidate cached data for a specific dependency pass `--refresh-package` to any
- command (e.g., `uv sync --refresh-package flask` or `uv pip install --refresh-package flask ...`).
+ command (e.g., `uv sync --refresh-package ruff` or `uv pip install --refresh-package ruff ...`).
- To force uv to ignore existing installed versions, pass `--reinstall` to any installation command
- (e.g., `uv sync --reinstall` or `uv pip install --reinstall ...`).
+ (e.g., `uv sync --reinstall` or `uv pip install --reinstall ...`). (Consider running
+ `uv cache clean <package>` first, to ensure that the cache is cleared prior to
+ reinstallation.)
As a special case, uv will always rebuild and reinstall any local directory dependencies passed
explicitly on the command-line (e.g., `uv pip install .`).
From a8bb7be52b15ac0b8bced1d58b4044191491ecce Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Wed, 16 Jul 2025 21:39:21 -0400
Subject: [PATCH 052/130] windows_exception: Improve async signal safety
(#14619)
It's not as bad as I feared to bypass libstd's stderr. (There's still a
lock in libstd's backtrace, which might also not be too bad to bypass.)
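As a rough illustration of the idea (not code from this patch), the sketch below implements `core::fmt::Write` on a type that writes straight to the stderr descriptor, so `writeln!` never takes the `std::io::stderr()` lock. The Unix `libc::write` call stands in for the Windows console/NT APIs used in the actual change, and the `libc` dependency is an assumption of the sketch.

```rust
// Sketch only: an allocation-light, lock-free stderr writer.
// The real patch targets Windows (WriteConsoleW / NtWriteFile); here
// write(2) on fd 2 stands in for those APIs to keep the example short.
use std::fmt::{self, Write};

struct RawStderr;

impl Write for RawStderr {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        let bytes = s.as_bytes();
        let mut written = 0;
        while written < bytes.len() {
            // SAFETY: plain write(2) on the stderr descriptor; async-signal-safe per POSIX.
            let n = unsafe {
                libc::write(2, bytes[written..].as_ptr().cast(), bytes.len() - written)
            };
            if n <= 0 {
                return Err(fmt::Error);
            }
            written += n as usize;
        }
        Ok(())
    }
}

fn main() {
    let mut err = RawStderr;
    // No std::io::stderr() lock is taken here.
    let _ = writeln!(err, "error: unhandled exception, code {:#x}", 0xC000_0005u32);
}
```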
---
Cargo.lock | 2 +
Cargo.toml | 3 +-
crates/uv/Cargo.toml | 2 +
crates/uv/src/windows_exception.rs | 299 +++++++++++++++++++++++------
4 files changed, 247 insertions(+), 59 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index c43f4872d..3ff7ad6d0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4637,6 +4637,7 @@ version = "0.7.21"
dependencies = [
"anstream",
"anyhow",
+ "arrayvec",
"assert_cmd",
"assert_fs",
"axoupdater",
@@ -4735,6 +4736,7 @@ dependencies = [
"which",
"whoami",
"windows 0.59.0",
+ "windows-result 0.3.4",
"wiremock",
"zip",
]
diff --git a/Cargo.toml b/Cargo.toml
index 752955223..2c32ce8d0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -75,6 +75,7 @@ uv-workspace = { path = "crates/uv-workspace" }
anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
+arrayvec = { version = "0.7.6" }
astral-tokio-tar = { version = "0.5.1" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
@@ -184,7 +185,7 @@ url = { version = "2.5.2", features = ["serde"] }
version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
-windows = { version = "0.59.0", features = ["Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
+windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
windows-core = { version = "0.59.0" }
windows-registry = { version = "0.5.0" }
windows-result = { version = "0.3.0" }
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index fe2f2200c..d72035467 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -107,8 +107,10 @@ which = { workspace = true }
zip = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]
+arrayvec = { workspace = true }
self-replace = { workspace = true }
windows = { workspace = true }
+windows-result = { workspace = true }
[dev-dependencies]
assert_cmd = { version = "2.0.16" }
diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs
index e96075f96..2e40e89cc 100644
--- a/crates/uv/src/windows_exception.rs
+++ b/crates/uv/src/windows_exception.rs
@@ -9,121 +9,304 @@
//! implementation and also displays some minimal information from the exception itself.
#![allow(unsafe_code)]
-#![allow(clippy::print_stderr)]
+// Usually we want fs_err over std::fs, but there's no advantage here, we don't
+// report errors encountered while reporting an exception.
+#![allow(clippy::disallowed_types)]
+use std::fmt::Write;
+use std::fs::File;
+use std::mem::ManuallyDrop;
+use std::os::windows::io::FromRawHandle;
+
+use arrayvec::ArrayVec;
use windows::Win32::{
Foundation,
+ Globalization::CP_UTF8,
+ System::Console::{
+ CONSOLE_MODE, GetConsoleMode, GetConsoleOutputCP, GetStdHandle, STD_ERROR_HANDLE,
+ WriteConsoleW,
+ },
System::Diagnostics::Debug::{
CONTEXT, EXCEPTION_CONTINUE_SEARCH, EXCEPTION_POINTERS, SetUnhandledExceptionFilter,
},
};
-fn display_exception_info(name: &str, info: &[usize; 15]) {
- match info[0] {
- 0 => eprintln!("{name} reading {:#x}", info[1]),
- 1 => eprintln!("{name} writing {:#x}", info[1]),
- 8 => eprintln!("{name} executing {:#x}", info[1]),
- _ => eprintln!("{name} from operation {} at {:#x}", info[0], info[1]),
+/// A write target for standard error that can be safely used in an exception handler.
+///
+/// The exception handler can be called at any point in the execution of machine code, perhaps
+/// halfway through a Rust operation. It needs to be robust to operating with unknown program
+/// state, a concept that the UNIX world calls "async signal safety." In particular, we can't
+/// write to `std::io::stderr()` because that takes a lock, and we could be called in the middle of
+/// code that is holding that lock.
+enum ExceptionSafeStderr {
+ // This is a simplified version of the logic in Rust std::sys::stdio::windows, on the
+ // assumption that we're only writing strs, not bytes (so we do not need to care about
+ // incomplete or invalid UTF-8) and we don't care about Windows 7 or every drop of
+ // performance.
+ // - If stderr is a non-UTF-8 console, we need to write UTF-16 with WriteConsoleW, and we
+ // convert with encode_utf16().
+ // - If stderr is not a console, we cannot use WriteConsole and must use NtWriteFile, which
+ // takes (UTF-8) bytes.
+ // - If stderr is a UTF-8 console, we can do either. std uses NtWriteFile.
+ // Note that we do not want to close stderr at any point, hence ManuallyDrop.
+ WriteConsole(Foundation::HANDLE),
+ NtWriteFile(ManuallyDrop<File>),
+}
+
+impl ExceptionSafeStderr {
+ fn new() -> Result<Self, windows_result::Error> {
+ // SAFETY: winapi call, no interesting parameters
+ let handle = unsafe { GetStdHandle(STD_ERROR_HANDLE) }?;
+ if handle.is_invalid() {
+ return Err(windows_result::Error::empty());
+ }
+ let mut mode = CONSOLE_MODE::default();
+ // SAFETY: winapi calls, no interesting parameters
+ if unsafe {
+ GetConsoleMode(handle, &raw mut mode).is_ok() && GetConsoleOutputCP() != CP_UTF8
+ } {
+ Ok(Self::WriteConsole(handle))
+ } else {
+ // SAFETY: winapi call, we just got this handle from the OS and checked it
+ let file = unsafe { File::from_raw_handle(handle.0) };
+ Ok(Self::NtWriteFile(ManuallyDrop::new(file)))
+ }
+ }
+
+ fn write_winerror(&mut self, s: &str) -> Result<(), windows_result::Error> {
+ match self {
+ Self::WriteConsole(handle) => {
+ // According to comments in the ReactOS source, NT's behavior is that writes of 80
+ // bytes or fewer are passed in-line in the message to the console server and
+ // longer writes allocate out of a shared heap with CSRSS. In an attempt to avoid
+ // allocations, write in 80-byte chunks.
+ let mut buf = ArrayVec::<u16, 40>::new();
+ for c in s.encode_utf16() {
+ if buf.try_push(c).is_err() {
+ // SAFETY: winapi call, arrayvec guarantees the slice is valid
+ unsafe { WriteConsoleW(*handle, &buf, None, None) }?;
+ buf.clear();
+ buf.push(c);
+ }
+ }
+ if !buf.is_empty() {
+ // SAFETY: winapi call, arrayvec guarantees the slice is valid
+ unsafe { WriteConsoleW(*handle, &buf, None, None) }?;
+ }
+ }
+ Self::NtWriteFile(file) => {
+ use std::io::Write;
+ file.write_all(s.as_bytes())?;
+ }
+ }
+ Ok(())
}
}
+impl Write for ExceptionSafeStderr {
+ fn write_str(&mut self, s: &str) -> std::fmt::Result {
+ self.write_winerror(s).map_err(|_| std::fmt::Error)
+ }
+}
+
+fn display_exception_info(
+ e: &mut ExceptionSafeStderr,
+ name: &str,
+ info: &[usize; 15],
+) -> std::fmt::Result {
+ match info[0] {
+ 0 => writeln!(e, "{name} reading {:#x}", info[1])?,
+ 1 => writeln!(e, "{name} writing {:#x}", info[1])?,
+ 8 => writeln!(e, "{name} executing {:#x}", info[1])?,
+ _ => writeln!(e, "{name} from operation {} at {:#x}", info[0], info[1])?,
+ }
+ Ok(())
+}
+
#[cfg(target_arch = "x86")]
-fn dump_regs(c: &CONTEXT) {
- eprintln!(
- "eax={:08x} ebx={:08x} ecx={:08x} edx={:08x} esi={:08x} edi={:08x}",
- c.Eax, c.Ebx, c.Ecx, c.Edx, c.Esi, c.Edi
- );
- eprintln!(
- "eip={:08x} ebp={:08x} esp={:08x} eflags={:08x}",
- c.Eip, c.Ebp, c.Esp, c.EFlags
- );
+fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result {
+ let CONTEXT {
+ Eax,
+ Ebx,
+ Ecx,
+ Edx,
+ Esi,
+ Edi,
+ Eip,
+ Ebp,
+ Esp,
+ EFlags,
+ ..
+ } = c;
+ writeln!(
+ e,
+ "eax={Eax:08x} ebx={Ebx:08x} ecx={Ecx:08x} edx={Edx:08x} esi={Esi:08x} edi={Edi:08x}"
+ )?;
+ writeln!(
+ e,
+ "eip={Eip:08x} ebp={Ebp:08x} esp={Esp:08x} eflags={EFlags:08x}"
+ )?;
+ Ok(())
}
#[cfg(target_arch = "x86_64")]
-fn dump_regs(c: &CONTEXT) {
- eprintln!("rax={:016x} rbx={:016x} rcx={:016x}", c.Rax, c.Rbx, c.Rcx);
- eprintln!("rdx={:016x} rsx={:016x} rdi={:016x}", c.Rdx, c.Rsi, c.Rdi);
- eprintln!("rsp={:016x} rbp={:016x} r8={:016x}", c.Rsp, c.Rbp, c.R8);
- eprintln!(" r9={:016x} r10={:016x} r11={:016x}", c.R9, c.R10, c.R11);
- eprintln!("r12={:016x} r13={:016x} r14={:016x}", c.R12, c.R13, c.R14);
- eprintln!(
- "r15={:016x} rip={:016x} eflags={:016x}",
- c.R15, c.Rip, c.EFlags
- );
+fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result {
+ let CONTEXT {
+ Rax,
+ Rbx,
+ Rcx,
+ Rdx,
+ Rsi,
+ Rdi,
+ Rsp,
+ Rbp,
+ R8,
+ R9,
+ R10,
+ R11,
+ R12,
+ R13,
+ R14,
+ R15,
+ Rip,
+ EFlags,
+ ..
+ } = c;
+ writeln!(e, "rax={Rax:016x} rbx={Rbx:016x} rcx={Rcx:016x}")?;
+ writeln!(e, "rdx={Rdx:016x} rsi={Rsi:016x} rdi={Rdi:016x}")?;
+ writeln!(e, "rsp={Rsp:016x} rbp={Rbp:016x} r8={R8 :016x}")?;
+ writeln!(e, " r9={R9 :016x} r10={R10:016x} r11={R11:016x}")?;
+ writeln!(e, "r12={R12:016x} r13={R13:016x} r14={R14:016x}")?;
+ writeln!(e, "r15={R15:016x} rip={Rip:016x} eflags={EFlags:016x}")?;
+ Ok(())
}
#[cfg(target_arch = "aarch64")]
-fn dump_regs(c: &CONTEXT) {
+fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result {
+ let CONTEXT { Cpsr, Sp, Pc, .. } = c;
// SAFETY: The two variants of this anonymous union are equivalent,
// one's an array and one has named registers.
- let r = unsafe { c.Anonymous.Anonymous };
- eprintln!("cpsr={:016x} sp={:016x} pc={:016x}", c.Cpsr, c.Sp, c.Pc);
- eprintln!(" x0={:016x} x1={:016x} x2={:016x}", r.X0, r.X1, r.X2);
- eprintln!(" x3={:016x} x4={:016x} x5={:016x}", r.X3, r.X4, r.X5);
- eprintln!(" x6={:016x} x7={:016x} x8={:016x}", r.X6, r.X7, r.X8);
- eprintln!(" x9={:016x} x10={:016x} x11={:016x}", r.X9, r.X10, r.X11);
- eprintln!(" x12={:016x} x13={:016x} x14={:016x}", r.X12, r.X13, r.X14);
- eprintln!(" x15={:016x} x16={:016x} x17={:016x}", r.X15, r.X16, r.X17);
- eprintln!(" x18={:016x} x19={:016x} x20={:016x}", r.X18, r.X19, r.X20);
- eprintln!(" x21={:016x} x22={:016x} x23={:016x}", r.X21, r.X22, r.X23);
- eprintln!(" x24={:016x} x25={:016x} x26={:016x}", r.X24, r.X25, r.X26);
- eprintln!(" x27={:016x} x28={:016x}", r.X27, r.X28);
- eprintln!(" fp={:016x} lr={:016x}", r.Fp, r.Lr);
+ let regs = unsafe { c.Anonymous.Anonymous };
+ let Windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 {
+ X0,
+ X1,
+ X2,
+ X3,
+ X4,
+ X5,
+ X6,
+ X7,
+ X8,
+ X9,
+ X10,
+ X11,
+ X12,
+ X13,
+ X14,
+ X15,
+ X16,
+ X17,
+ X18,
+ X19,
+ X20,
+ X21,
+ X22,
+ X23,
+ X24,
+ X25,
+ X26,
+ X27,
+ X28,
+ Fp,
+ Lr,
+ } = regs;
+ writeln!(e, "cpsr={Cpsr:016x} sp={Sp :016x} pc={Pc :016x}")?;
+ writeln!(e, " x0={X0 :016x} x1={X1 :016x} x2={X2 :016x}")?;
+ writeln!(e, " x3={X3 :016x} x4={X4 :016x} x5={X5 :016x}")?;
+ writeln!(e, " x6={X6 :016x} x7={X7 :016x} x8={X8 :016x}")?;
+ writeln!(e, " x9={X9 :016x} x10={X10:016x} x11={X11:016x}")?;
+ writeln!(e, " x12={X12 :016x} x13={X13:016x} x14={X14:016x}")?;
+ writeln!(e, " x15={X15 :016x} x16={X16:016x} x17={X17:016x}")?;
+ writeln!(e, " x18={X18 :016x} x19={X19:016x} x20={X20:016x}")?;
+ writeln!(e, " x21={X21 :016x} x22={X22:016x} x23={X23:016x}")?;
+ writeln!(e, " x24={X24 :016x} x25={X25:016x} x26={X26:016x}")?;
+ writeln!(e, " x27={X27 :016x} x28={X28:016x}")?;
+ writeln!(e, " fp={Fp :016x} lr={Lr :016x}")?;
+ Ok(())
}
-unsafe extern "system" fn unhandled_exception_filter(
- exception_info: *const EXCEPTION_POINTERS,
-) -> i32 {
- // TODO: Really we should not be using eprintln here because Stderr is not async-signal-safe.
- // Probably we should be calling the console APIs directly.
- eprintln!("error: unhandled exception in uv, please report a bug:");
+fn dump_exception(exception_info: *const EXCEPTION_POINTERS) -> std::fmt::Result {
+ let mut e = ExceptionSafeStderr::new().map_err(|_| std::fmt::Error)?;
+ writeln!(e, "error: unhandled exception in uv, please report a bug:")?;
let mut context = None;
// SAFETY: Pointer comes from the OS
if let Some(info) = unsafe { exception_info.as_ref() } {
// SAFETY: Pointer comes from the OS
if let Some(exc) = unsafe { info.ExceptionRecord.as_ref() } {
- eprintln!(
+ writeln!(
+ e,
"code {:#X} at address {:?}",
exc.ExceptionCode.0, exc.ExceptionAddress
- );
+ )?;
match exc.ExceptionCode {
Foundation::EXCEPTION_ACCESS_VIOLATION => {
- display_exception_info("EXCEPTION_ACCESS_VIOLATION", &exc.ExceptionInformation);
+ display_exception_info(
+ &mut e,
+ "EXCEPTION_ACCESS_VIOLATION",
+ &exc.ExceptionInformation,
+ )?;
}
Foundation::EXCEPTION_IN_PAGE_ERROR => {
- display_exception_info("EXCEPTION_IN_PAGE_ERROR", &exc.ExceptionInformation);
+ display_exception_info(
+ &mut e,
+ "EXCEPTION_IN_PAGE_ERROR",
+ &exc.ExceptionInformation,
+ )?;
}
Foundation::EXCEPTION_ILLEGAL_INSTRUCTION => {
- eprintln!("EXCEPTION_ILLEGAL_INSTRUCTION");
+ writeln!(e, "EXCEPTION_ILLEGAL_INSTRUCTION")?;
}
Foundation::EXCEPTION_STACK_OVERFLOW => {
- eprintln!("EXCEPTION_STACK_OVERFLOW");
+ writeln!(e, "EXCEPTION_STACK_OVERFLOW")?;
}
_ => {}
}
} else {
- eprintln!("(ExceptionRecord is NULL)");
+ writeln!(e, "(ExceptionRecord is NULL)")?;
}
// SAFETY: Pointer comes from the OS
context = unsafe { info.ContextRecord.as_ref() };
} else {
- eprintln!("(ExceptionInfo is NULL)");
+ writeln!(e, "(ExceptionInfo is NULL)")?;
}
+ // TODO: std::backtrace does a lot of allocations, so we are no longer async-signal-safe at
+ // this point, but hopefully we got a useful error message on screen already. We could do a
+ // better job by using backtrace-rs directly + arrayvec.
let backtrace = std::backtrace::Backtrace::capture();
if backtrace.status() == std::backtrace::BacktraceStatus::Disabled {
- eprintln!("note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace");
+ writeln!(
+ e,
+ "note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace"
+ )?;
} else {
if let Some(context) = context {
- dump_regs(context);
+ dump_regs(&mut e, context)?;
}
- eprintln!("stack backtrace:\n{backtrace:#}");
+ writeln!(e, "stack backtrace:\n{backtrace:#}")?;
}
+ Ok(())
+}
+
+unsafe extern "system" fn unhandled_exception_filter(
+ exception_info: *const EXCEPTION_POINTERS,
+) -> i32 {
+ let _ = dump_exception(exception_info);
EXCEPTION_CONTINUE_SEARCH
}
/// Set up our handler for unhandled exceptions.
pub(crate) fn setup() {
- // SAFETY: winapi call
+ // SAFETY: winapi call, argument is a mostly async-signal-safe function
unsafe {
SetUnhandledExceptionFilter(Some(Some(unhandled_exception_filter)));
}
From b3df1c2401e71eea05f2dba3ff39d48a1a5d4ded Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Thu, 17 Jul 2025 08:29:41 -0400
Subject: [PATCH 053/130] Fix typo in #14619 (#14677)
---
crates/uv/src/windows_exception.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs
index 2e40e89cc..048eaa1ba 100644
--- a/crates/uv/src/windows_exception.rs
+++ b/crates/uv/src/windows_exception.rs
@@ -187,7 +187,7 @@ fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result {
// SAFETY: The two variants of this anonymous union are equivalent,
// one's an array and one has named registers.
let regs = unsafe { c.Anonymous.Anonymous };
- let Windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 {
+ let windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 {
X0,
X1,
X2,
From 09fc943cca0789e83c6afa16fc17d1d6f54b7978 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 07:38:12 -0500
Subject: [PATCH 054/130] Rename msrv build job for consistency with other
binary builds (#14679)
---
.github/workflows/ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bb357f4a3..e9beddcc5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -654,8 +654,8 @@ jobs:
${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe
retention-days: 1
- cargo-build-msrv:
- name: "cargo build (msrv)"
+ build-binary-msrv:
+ name: "build binary | msrv"
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
runs-on: github-ubuntu-24.04-x86_64-8
From bdb8c2646a0f4ce64574b048e9b60246193a9ffc Mon Sep 17 00:00:00 2001
From: adisbladis
Date: Fri, 18 Jul 2025 01:11:32 +1200
Subject: [PATCH 055/130] Add UV_COMPILE_BYTECODE_TIMEOUT environment variable
(#14369)
## Summary
When installing packages on _very_ slow/overloaded systems, it's possible
to trigger bytecode compilation timeouts. This tends to happen in
environments such as QEMU (especially without KVM/virtio), but also on
systems that are simply overloaded. I've seen this in my Nix builds when,
for example, I'm compiling a Linux kernel alongside a few other concurrent
builds.
By making the bytecode compilation timeout adjustable you can work
around such issues. I plan to set `UV_COMPILE_BYTECODE_TIMEOUT=0` in the
[pyproject.nix
builders](https://pyproject-nix.github.io/pyproject.nix/build.html) to
make them more reliable.
- Related issues
* https://github.com/astral-sh/uv/issues/6105
## Test Plan
Only manual testing was applied in this instance. There is no existing
automated tests for bytecode compilation timeout afaict.
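For reference, here is a minimal sketch of the semantics described above: unset keeps the default, `0` disables the timeout entirely, and anything else must parse as whole seconds. The direct environment read and the simplified error type are assumptions of the sketch, not uv's internal API.

```rust
// Minimal sketch of the UV_COMPILE_BYTECODE_TIMEOUT semantics.
use std::time::Duration;

const DEFAULT_COMPILE_TIMEOUT: Duration = Duration::from_secs(60);

fn compile_timeout_from_env() -> Result<Option<Duration>, String> {
    match std::env::var("UV_COMPILE_BYTECODE_TIMEOUT") {
        // Unset (or non-UTF-8): keep the default timeout.
        Err(_) => Ok(Some(DEFAULT_COMPILE_TIMEOUT)),
        // "0": disable the timeout entirely.
        Ok(value) if value == "0" => Ok(None),
        // Anything else must be a whole number of seconds.
        Ok(value) => value
            .parse::<u64>()
            .map(|secs| Some(Duration::from_secs(secs)))
            .map_err(|_| format!("expected an integer number of seconds, got \"{value}\"")),
    }
}

fn main() {
    match compile_timeout_from_env() {
        Ok(Some(timeout)) => println!("bytecode compilation timeout: {}s", timeout.as_secs()),
        Ok(None) => println!("bytecode compilation timeout disabled"),
        Err(err) => eprintln!("invalid UV_COMPILE_BYTECODE_TIMEOUT: {err}"),
    }
}
```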
---
crates/uv-installer/src/compile.rs | 61 ++++++++++++++++++++++++------
crates/uv-static/src/env_vars.rs | 3 ++
docs/reference/environment.md | 4 ++
3 files changed, 56 insertions(+), 12 deletions(-)
diff --git a/crates/uv-installer/src/compile.rs b/crates/uv-installer/src/compile.rs
index 7dd167b4a..4ee74f40d 100644
--- a/crates/uv-installer/src/compile.rs
+++ b/crates/uv-installer/src/compile.rs
@@ -2,7 +2,7 @@ use std::panic::AssertUnwindSafe;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::time::Duration;
-use std::{io, panic};
+use std::{env, io, panic};
use async_channel::{Receiver, SendError};
use tempfile::tempdir_in;
@@ -20,7 +20,7 @@ use uv_warnings::warn_user;
const COMPILEALL_SCRIPT: &str = include_str!("pip_compileall.py");
/// This is longer than any compilation should ever take.
-const COMPILE_TIMEOUT: Duration = Duration::from_secs(60);
+const DEFAULT_COMPILE_TIMEOUT: Duration = Duration::from_secs(60);
#[derive(Debug, Error)]
pub enum CompileError {
@@ -55,6 +55,8 @@ pub enum CompileError {
},
#[error("Python startup timed out ({}s)", _0.as_secs_f32())]
StartupTimeout(Duration),
+ #[error("Got invalid value from environment for {var}: {message}.")]
+ EnvironmentError { var: &'static str, message: String },
}
/// Bytecode compile all file in `dir` using a pool of Python interpreters running a Python script
@@ -88,6 +90,29 @@ pub async fn compile_tree(
let tempdir = tempdir_in(cache).map_err(CompileError::TempFile)?;
let pip_compileall_py = tempdir.path().join("pip_compileall.py");
+ let timeout: Option<Duration> = match env::var(EnvVars::UV_COMPILE_BYTECODE_TIMEOUT) {
+ Ok(value) => {
+ if value == "0" {
+ debug!("Disabling bytecode compilation timeout");
+ None
+ } else {
+ if let Ok(duration) = value.parse::<u64>().map(Duration::from_secs) {
+ debug!(
+ "Using bytecode compilation timeout of {}s",
+ duration.as_secs()
+ );
+ Some(duration)
+ } else {
+ return Err(CompileError::EnvironmentError {
+ var: "UV_COMPILE_BYTECODE_TIMEOUT",
+ message: format!("Expected an integer number of seconds, got \"{value}\""),
+ });
+ }
+ }
+ }
+ Err(_) => Some(DEFAULT_COMPILE_TIMEOUT),
+ };
+
debug!("Starting {} bytecode compilation workers", worker_count);
let mut worker_handles = Vec::new();
for _ in 0..worker_count {
@@ -98,6 +123,7 @@ pub async fn compile_tree(
python_executable.to_path_buf(),
pip_compileall_py.clone(),
receiver.clone(),
+ timeout,
);
// Spawn each worker on a dedicated thread.
@@ -189,6 +215,7 @@ async fn worker(
interpreter: PathBuf,
pip_compileall_py: PathBuf,
receiver: Receiver<PathBuf>,
+ timeout: Option<Duration>,
) -> Result<(), CompileError> {
fs_err::tokio::write(&pip_compileall_py, COMPILEALL_SCRIPT)
.await
@@ -208,12 +235,17 @@ async fn worker(
}
}
};
+
// Handle a broken `python` by using a timeout, one that's higher than any compilation
// should ever take.
let (mut bytecode_compiler, child_stdin, mut child_stdout, mut child_stderr) =
- tokio::time::timeout(COMPILE_TIMEOUT, wait_until_ready)
- .await
- .map_err(|_| CompileError::StartupTimeout(COMPILE_TIMEOUT))??;
+ if let Some(duration) = timeout {
+ tokio::time::timeout(duration, wait_until_ready)
+ .await
+ .map_err(|_| CompileError::StartupTimeout(timeout.unwrap()))??
+ } else {
+ wait_until_ready.await?
+ };
let stderr_reader = tokio::task::spawn(async move {
let mut child_stderr_collected: Vec<u8> = Vec::new();
@@ -223,7 +255,7 @@ async fn worker(
Ok(child_stderr_collected)
});
- let result = worker_main_loop(receiver, child_stdin, &mut child_stdout).await;
+ let result = worker_main_loop(receiver, child_stdin, &mut child_stdout, timeout).await;
// Reap the process to avoid zombies.
let _ = bytecode_compiler.kill().await;
@@ -340,6 +372,7 @@ async fn worker_main_loop(
receiver: Receiver<PathBuf>,
mut child_stdin: ChildStdin,
child_stdout: &mut BufReader<ChildStdout>,
+ timeout: Option<Duration>,
) -> Result<(), CompileError> {
let mut out_line = String::new();
while let Ok(source_file) = receiver.recv().await {
@@ -372,12 +405,16 @@ async fn worker_main_loop(
// Handle a broken `python` by using a timeout, one that's higher than any compilation
// should ever take.
- tokio::time::timeout(COMPILE_TIMEOUT, python_handle)
- .await
- .map_err(|_| CompileError::CompileTimeout {
- elapsed: COMPILE_TIMEOUT,
- source_file: source_file.clone(),
- })??;
+ if let Some(duration) = timeout {
+ tokio::time::timeout(duration, python_handle)
+ .await
+ .map_err(|_| CompileError::CompileTimeout {
+ elapsed: duration,
+ source_file: source_file.clone(),
+ })??;
+ } else {
+ python_handle.await?;
+ }
// This is a sanity check, if we don't get the path back something has gone wrong, e.g.
// we're not actually running a python interpreter.
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index ae981cac3..216228ff2 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -162,6 +162,9 @@ impl EnvVars {
/// will compile Python source files to bytecode after installation.
pub const UV_COMPILE_BYTECODE: &'static str = "UV_COMPILE_BYTECODE";
+ /// Timeout (in seconds) for bytecode compilation.
+ pub const UV_COMPILE_BYTECODE_TIMEOUT: &'static str = "UV_COMPILE_BYTECODE_TIMEOUT";
+
/// Equivalent to the `--no-editable` command-line argument. If set, uv
/// installs any editable dependencies, including the project and any workspace members, as
/// non-editable
diff --git a/docs/reference/environment.md b/docs/reference/environment.md
index 47e4d8db9..5f06cfd3f 100644
--- a/docs/reference/environment.md
+++ b/docs/reference/environment.md
@@ -26,6 +26,10 @@ directory for caching instead of the default cache directory.
Equivalent to the `--compile-bytecode` command-line argument. If set, uv
will compile Python source files to bytecode after installation.
+### `UV_COMPILE_BYTECODE_TIMEOUT`
+
+Timeout (in seconds) for bytecode compilation.
+
### `UV_CONCURRENT_BUILDS`
Sets the maximum number of source distributions that uv will build
From 3884ab5715937fdf01dbc1c6bfb91cacf00e20ce Mon Sep 17 00:00:00 2001
From: adisbladis
Date: Fri, 18 Jul 2025 01:35:25 +1200
Subject: [PATCH 056/130] Fix bytecode compilation debug message introduced by
#14369 (#14682)
## Summary
When refactoring the addition PR I accidentally introduced a bug where
the debug message would not be output if the default value is used.
cc @zanieb
---
crates/uv-installer/src/compile.rs | 29 +++++++++++++++--------------
1 file changed, 15 insertions(+), 14 deletions(-)
diff --git a/crates/uv-installer/src/compile.rs b/crates/uv-installer/src/compile.rs
index 4ee74f40d..8704d9542 100644
--- a/crates/uv-installer/src/compile.rs
+++ b/crates/uv-installer/src/compile.rs
@@ -91,27 +91,28 @@ pub async fn compile_tree(
let pip_compileall_py = tempdir.path().join("pip_compileall.py");
let timeout: Option<Duration> = match env::var(EnvVars::UV_COMPILE_BYTECODE_TIMEOUT) {
- Ok(value) => {
- if value == "0" {
- debug!("Disabling bytecode compilation timeout");
- None
- } else {
- if let Ok(duration) = value.parse::<u64>().map(Duration::from_secs) {
- debug!(
- "Using bytecode compilation timeout of {}s",
- duration.as_secs()
- );
- Some(duration)
- } else {
+ Ok(value) => match value.as_str() {
+ "0" => None,
+ _ => match value.parse::<u64>().map(Duration::from_secs) {
+ Ok(duration) => Some(duration),
+ Err(_) => {
return Err(CompileError::EnvironmentError {
var: "UV_COMPILE_BYTECODE_TIMEOUT",
message: format!("Expected an integer number of seconds, got \"{value}\""),
});
}
- }
- }
+ },
+ },
Err(_) => Some(DEFAULT_COMPILE_TIMEOUT),
};
+ if let Some(duration) = timeout {
+ debug!(
+ "Using bytecode compilation timeout of {}s",
+ duration.as_secs()
+ );
+ } else {
+ debug!("Disabling bytecode compilation timeout");
+ }
debug!("Starting {} bytecode compilation workers", worker_count);
let mut worker_handles = Vec::new();
From 78d6d1134a50705d336ba209055eb0076feb017d Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 11:27:15 -0500
Subject: [PATCH 057/130] Bump version to 0.7.22 (#14685)
---
CHANGELOG.md | 34 +++++++++++++++++++++++++++
Cargo.lock | 6 ++---
crates/uv-build/Cargo.toml | 2 +-
crates/uv-build/pyproject.toml | 2 +-
crates/uv-version/Cargo.toml | 2 +-
crates/uv/Cargo.toml | 2 +-
docs/concepts/build-backend.md | 2 +-
docs/getting-started/installation.md | 4 ++--
docs/guides/integration/aws-lambda.md | 4 ++--
docs/guides/integration/docker.md | 10 ++++----
docs/guides/integration/github.md | 2 +-
docs/guides/integration/pre-commit.md | 10 ++++----
pyproject.toml | 2 +-
13 files changed, 58 insertions(+), 24 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 38be00d2d..87cf0c9e8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,40 @@
+## 0.7.22
+
+### Python
+
+- Upgrade GraalPy to 24.2.2
+
+See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for more details.
+
+### Configuration
+
+- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable ([#14369](https://github.com/astral-sh/uv/pull/14369))
+- Allow users to override index `cache-control` headers ([#14620](https://github.com/astral-sh/uv/pull/14620))
+- Add `UV_LIBC` to override libc selection in multi-libc environment ([#14646](https://github.com/astral-sh/uv/pull/14646))
+
+### Bug fixes
+
+- Fix `--all-arches` when paired with `--only-downloads` ([#14629](https://github.com/astral-sh/uv/pull/14629))
+- Skip Windows Python interpreters that return a broken MSIX package code ([#14636](https://github.com/astral-sh/uv/pull/14636))
+- Warn on invalid `uv.toml` when provided via direct path ([#14653](https://github.com/astral-sh/uv/pull/14653))
+- Improve async signal safety in Windows exception handler ([#14619](https://github.com/astral-sh/uv/pull/14619))
+
+### Documentation
+
+- Mention the `revision` in the lockfile versioning doc ([#14634](https://github.com/astral-sh/uv/pull/14634))
+- Move "Conflicting dependencies" to the "Resolution" page ([#14633](https://github.com/astral-sh/uv/pull/14633))
+- Rename "Dependency specifiers" section to exclude PEP 508 reference ([#14631](https://github.com/astral-sh/uv/pull/14631))
+- Suggest `uv cache clean` prior to `--reinstall` ([#14659](https://github.com/astral-sh/uv/pull/14659))
+
+### Preview features
+
+- Make preview Python registration on Windows non-fatal ([#14614](https://github.com/astral-sh/uv/pull/14614))
+- Update preview installation of Python executables to be non-fatal ([#14612](https://github.com/astral-sh/uv/pull/14612))
+- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))
+
## 0.7.21
### Python
diff --git a/Cargo.lock b/Cargo.lock
index 3ff7ad6d0..8a95f655d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4633,7 +4633,7 @@ dependencies = [
[[package]]
name = "uv"
-version = "0.7.21"
+version = "0.7.22"
dependencies = [
"anstream",
"anyhow",
@@ -4800,7 +4800,7 @@ dependencies = [
[[package]]
name = "uv-build"
-version = "0.7.21"
+version = "0.7.22"
dependencies = [
"anyhow",
"uv-build-backend",
@@ -5993,7 +5993,7 @@ dependencies = [
[[package]]
name = "uv-version"
-version = "0.7.21"
+version = "0.7.22"
[[package]]
name = "uv-virtualenv"
diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml
index f943010ae..8014fa445 100644
--- a/crates/uv-build/Cargo.toml
+++ b/crates/uv-build/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-build"
-version = "0.7.21"
+version = "0.7.22"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml
index 5a2209155..1a78d34dc 100644
--- a/crates/uv-build/pyproject.toml
+++ b/crates/uv-build/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "uv-build"
-version = "0.7.21"
+version = "0.7.22"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml
index a9fe788a5..e1a424af8 100644
--- a/crates/uv-version/Cargo.toml
+++ b/crates/uv-version/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-version"
-version = "0.7.21"
+version = "0.7.22"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index d72035467..975495904 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv"
-version = "0.7.21"
+version = "0.7.22"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md
index 69694f317..5f52463bf 100644
--- a/docs/concepts/build-backend.md
+++ b/docs/concepts/build-backend.md
@@ -36,7 +36,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the
```toml title="pyproject.toml"
[build-system]
-requires = ["uv_build>=0.7.21,<0.8.0"]
+requires = ["uv_build>=0.7.22,<0.8.0"]
build-backend = "uv_build"
```
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index fa68d210a..3e31a5003 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```console
- $ curl -LsSf https://astral.sh/uv/0.7.21/install.sh | sh
+ $ curl -LsSf https://astral.sh/uv/0.7.22/install.sh | sh
```
=== "Windows"
@@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```pwsh-session
- PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.21/install.ps1 | iex"
+ PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.22/install.ps1 | iex"
```
!!! tip
diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md
index 4cdb75b7a..14224b3fe 100644
--- a/docs/guides/integration/aws-lambda.md
+++ b/docs/guides/integration/aws-lambda.md
@@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th
other unnecessary files.
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.21 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.22 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
@@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell
Finally, we'll update the Dockerfile to include the local library in the deployment package:
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.21 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.22 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md
index bbea9b264..2ea14c9b0 100644
--- a/docs/guides/integration/docker.md
+++ b/docs/guides/integration/docker.md
@@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help
The following distroless images are available:
- `ghcr.io/astral-sh/uv:latest`
-- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.21`
+- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.22`
- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
version)
@@ -75,7 +75,7 @@ And the following derived images are available:
As with the distroless image, each derived image is published with uv version tags as
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
-`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.21-alpine`.
+`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`.
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
page.
@@ -113,7 +113,7 @@ Note this requires `curl` to be available.
In either case, it is best practice to pin to a specific uv version, e.g., with:
```dockerfile
-COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/
+COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/
```
!!! tip
@@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/
Or, with the installer:
```dockerfile
-ADD https://astral.sh/uv/0.7.21/install.sh /uv-installer.sh
+ADD https://astral.sh/uv/0.7.22/install.sh /uv-installer.sh
```
### Installing a project
@@ -557,5 +557,5 @@ Verified OK
!!! tip
These examples use `latest`, but best practice is to verify the attestation for a specific
- version tag, e.g., `ghcr.io/astral-sh/uv:0.7.21`, or (even better) the specific image digest,
+ version tag, e.g., `ghcr.io/astral-sh/uv:0.7.22`, or (even better) the specific image digest,
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md
index d206febd1..956b47660 100644
--- a/docs/guides/integration/github.md
+++ b/docs/guides/integration/github.md
@@ -47,7 +47,7 @@ jobs:
uses: astral-sh/setup-uv@v6
with:
# Install a specific version of uv.
- version: "0.7.21"
+ version: "0.7.22"
```
## Setting up Python
diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md
index 912ff0213..d2598fed8 100644
--- a/docs/guides/integration/pre-commit.md
+++ b/docs/guides/integration/pre-commit.md
@@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.21
+ rev: 0.7.22
hooks:
- id: uv-lock
```
@@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.21
+ rev: 0.7.22
hooks:
- id: uv-export
```
@@ -41,7 +41,7 @@ To compile requirements files:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.21
+ rev: 0.7.22
hooks:
# Compile requirements
- id: pip-compile
@@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.21
+ rev: 0.7.22
hooks:
# Compile requirements
- id: pip-compile
@@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.21
+ rev: 0.7.22
hooks:
# Compile requirements
- id: pip-compile
diff --git a/pyproject.toml b/pyproject.toml
index f3c9c4f64..a079d53b2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "uv"
-version = "0.7.21"
+version = "0.7.22"
description = "An extremely fast Python package and project manager, written in Rust."
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
From 868ecd7b3a4855b3b84ec121a826fb218b843084 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 12:33:43 -0500
Subject: [PATCH 058/130] Add support for toggling Python bin and registry
install options via env vars (#14662)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Adds environment variables for
https://github.com/astral-sh/uv/pull/14612 and
https://github.com/astral-sh/uv/pull/14614
We can't use the Clap `BoolishValueParser` here, and the reasoning is a
little hard to explain. If we used `UV_PYTHON_INSTALL_NO_BIN`, as is our
typical pattern, it'd work, but here we allow opt-in to hard errors with
`UV_PYTHON_INSTALL_BIN=1` and I don't think we should have both
`UV_PYTHON_INSTALL_BIN` and `UV_PYTHON_INSTALL_NO_BIN`.
Consequently, this pull request introduces a new `EnvironmentOptions`
abstraction which allows us to express semantics that Clap cannot —
which we probably want anyway because we have an increasing number of
environment variables we're parsing downstream, e.g., #14544 and #14369.
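As a rough sketch of the precedence this enables (the helper names are illustrative, not uv's internal API): an explicit CLI flag wins, and only when no flag was passed is the Clap-style boolish environment value consulted.

```rust
// Sketch: CLI flag takes precedence over a boolish environment variable.
fn str_to_bool(value: &str) -> Option<bool> {
    match value.to_lowercase().as_str() {
        "y" | "yes" | "t" | "true" | "on" | "1" => Some(true),
        "n" | "no" | "f" | "false" | "off" | "0" => Some(false),
        _ => None,
    }
}

/// `--bin` / `--no-bin` already collapsed into an `Option<bool>` by the CLI layer.
fn resolve_bin(cli_flag: Option<bool>, env_value: Option<&str>) -> Option<bool> {
    cli_flag.or_else(|| env_value.and_then(str_to_bool))
}

fn main() {
    // CLI flag absent, UV_PYTHON_INSTALL_BIN=1 -> opt in to a hard error on failure.
    assert_eq!(resolve_bin(None, Some("1")), Some(true));
    // An explicit --no-bin beats the environment.
    assert_eq!(resolve_bin(Some(false), Some("1")), Some(false));
    // Unrecognized values are ignored rather than guessed.
    assert_eq!(resolve_bin(None, Some("maybe")), None);
    println!("ok");
}
```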
---
crates/uv-cli/src/lib.rs | 8 +++
crates/uv-settings/src/lib.rs | 82 ++++++++++++++++++++++++++++
crates/uv-static/src/env_vars.rs | 6 ++
crates/uv/src/lib.rs | 7 ++-
crates/uv/src/settings.rs | 15 +++--
crates/uv/tests/it/help.rs | 8 ++-
crates/uv/tests/it/python_install.rs | 20 ++++++-
docs/reference/cli.md | 6 +-
docs/reference/environment.md | 8 +++
9 files changed, 147 insertions(+), 13 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index a846aec59..94b79558d 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -4959,11 +4959,15 @@ pub struct PythonInstallArgs {
/// This is the default behavior. If this flag is provided explicitly, uv will error if the
/// executable cannot be installed.
///
+ /// This can also be set with `UV_PYTHON_INSTALL_BIN=1`.
+ ///
/// See `UV_PYTHON_BIN_DIR` to customize the target directory.
#[arg(long, overrides_with("no_bin"), hide = true)]
pub bin: bool,
/// Do not install a Python executable into the `bin` directory.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_BIN=0`.
#[arg(long, overrides_with("bin"), conflicts_with("default"))]
pub no_bin: bool,
@@ -4971,10 +4975,14 @@ pub struct PythonInstallArgs {
///
/// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the
/// registry entry cannot be created.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=1`.
#[arg(long, overrides_with("no_registry"), hide = true)]
pub registry: bool,
/// Do not register the Python installation in the Windows registry.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`.
#[arg(long, overrides_with("registry"))]
pub no_registry: bool,
diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs
index d676cc060..cad600cfc 100644
--- a/crates/uv-settings/src/lib.rs
+++ b/crates/uv-settings/src/lib.rs
@@ -3,6 +3,7 @@ use std::path::{Path, PathBuf};
use uv_dirs::{system_config_file, user_config_dir};
use uv_fs::Simplified;
+use uv_static::EnvVars;
use uv_warnings::warn_user;
pub use crate::combine::*;
@@ -246,4 +247,85 @@ pub enum Error {
#[error("Failed to parse: `{}`. The `{}` field is not allowed in a `uv.toml` file. `{}` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead.", _0.user_display(), _1, _1)]
PyprojectOnlyField(PathBuf, &'static str),
+
+ #[error("Failed to parse environment variable `{name}` with invalid value `{value}`: {err}")]
+ InvalidEnvironmentVariable {
+ name: String,
+ value: String,
+ err: String,
+ },
+}
+
+/// Options loaded from environment variables.
+///
+/// This is currently a subset of all respected environment variables, most are parsed via Clap at
+/// the CLI level, however there are limited semantics in that context.
+#[derive(Debug, Clone)]
+pub struct EnvironmentOptions {
+ pub python_install_bin: Option<bool>,
+ pub python_install_registry: Option<bool>,
+}
+
+impl EnvironmentOptions {
+ /// Create a new [`EnvironmentOptions`] from environment variables.
+ pub fn new() -> Result<Self, Error> {
+ Ok(Self {
+ python_install_bin: parse_boolish_environment_variable(EnvVars::UV_PYTHON_INSTALL_BIN)?,
+ python_install_registry: parse_boolish_environment_variable(
+ EnvVars::UV_PYTHON_INSTALL_REGISTRY,
+ )?,
+ })
+ }
+}
+
+/// Parse a boolean environment variable.
+///
+/// Adapted from Clap's `BoolishValueParser` which is dual licensed under the MIT and Apache-2.0.
+fn parse_boolish_environment_variable(name: &'static str) -> Result<Option<bool>, Error> {
+ // See `clap_builder/src/util/str_to_bool.rs`
+ // We want to match Clap's accepted values
+
+ // True values are `y`, `yes`, `t`, `true`, `on`, and `1`.
+ const TRUE_LITERALS: [&str; 6] = ["y", "yes", "t", "true", "on", "1"];
+
+ // False values are `n`, `no`, `f`, `false`, `off`, and `0`.
+ const FALSE_LITERALS: [&str; 6] = ["n", "no", "f", "false", "off", "0"];
+
+ // Converts a string literal representation of truth to true or false.
+ //
+ // `false` values are `n`, `no`, `f`, `false`, `off`, and `0` (case insensitive).
+ //
+ // Any other value will be considered as `true`.
+ fn str_to_bool(val: impl AsRef<str>) -> Option<bool> {
+ let pat: &str = &val.as_ref().to_lowercase();
+ if TRUE_LITERALS.contains(&pat) {
+ Some(true)
+ } else if FALSE_LITERALS.contains(&pat) {
+ Some(false)
+ } else {
+ None
+ }
+ }
+
+ let Some(value) = std::env::var_os(name) else {
+ return Ok(None);
+ };
+
+ let Some(value) = value.to_str() else {
+ return Err(Error::InvalidEnvironmentVariable {
+ name: name.to_string(),
+ value: value.to_string_lossy().to_string(),
+ err: "expected a valid UTF-8 string".to_string(),
+ });
+ };
+
+ let Some(value) = str_to_bool(value) else {
+ return Err(Error::InvalidEnvironmentVariable {
+ name: name.to_string(),
+ value: value.to_string(),
+ err: "expected a boolish value".to_string(),
+ });
+ };
+
+ Ok(Some(value))
}
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index 216228ff2..58458e8ca 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -269,6 +269,12 @@ impl EnvVars {
/// Specifies the directory for storing managed Python installations.
pub const UV_PYTHON_INSTALL_DIR: &'static str = "UV_PYTHON_INSTALL_DIR";
+ /// Whether to install the Python executable into the `UV_PYTHON_BIN_DIR` directory.
+ pub const UV_PYTHON_INSTALL_BIN: &'static str = "UV_PYTHON_INSTALL_BIN";
+
+ /// Whether to install the Python executable into the Windows registry.
+ pub const UV_PYTHON_INSTALL_REGISTRY: &'static str = "UV_PYTHON_INSTALL_REGISTRY";
+
/// Managed Python installations information is hardcoded in the `uv` binary.
///
/// This variable can be set to a URL pointing to JSON to use as a list for Python installations.
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 384f48ac4..995738638 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -39,7 +39,7 @@ use uv_python::PythonRequest;
use uv_requirements::RequirementsSource;
use uv_requirements_txt::RequirementsTxtRequirement;
use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script};
-use uv_settings::{Combine, FilesystemOptions, Options};
+use uv_settings::{Combine, EnvironmentOptions, FilesystemOptions, Options};
use uv_static::EnvVars;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache};
@@ -304,6 +304,9 @@ async fn run(mut cli: Cli) -> Result {
.map(FilesystemOptions::from)
.combine(filesystem);
+ // Load environment variables not handled by Clap
+ let environment = EnvironmentOptions::new()?;
+
// Resolve the global settings.
let globals = GlobalSettings::resolve(&cli.top_level.global_args, filesystem.as_ref());
@@ -1391,7 +1394,7 @@ async fn run(mut cli: Cli) -> Result {
command: PythonCommand::Install(args),
}) => {
// Resolve the settings from the command-line arguments and workspace configuration.
- let args = settings::PythonInstallSettings::resolve(args, filesystem);
+ let args = settings::PythonInstallSettings::resolve(args, filesystem, environment);
show_settings!(args);
// TODO(john): If we later want to support `--upgrade`, we need to replace this.
let upgrade = false;
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index b221f0f5d..b246f228f 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -38,8 +38,8 @@ use uv_resolver::{
AnnotationStyle, DependencyMode, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode,
};
use uv_settings::{
- Combine, FilesystemOptions, Options, PipOptions, PublishOptions, PythonInstallMirrors,
- ResolverInstallerOptions, ResolverOptions,
+ Combine, EnvironmentOptions, FilesystemOptions, Options, PipOptions, PublishOptions,
+ PythonInstallMirrors, ResolverInstallerOptions, ResolverOptions,
};
use uv_static::EnvVars;
use uv_torch::TorchMode;
@@ -944,7 +944,11 @@ pub(crate) struct PythonInstallSettings {
impl PythonInstallSettings {
/// Resolve the [`PythonInstallSettings`] from the CLI and filesystem configuration.
#[allow(clippy::needless_pass_by_value)]
- pub(crate) fn resolve(args: PythonInstallArgs, filesystem: Option<FilesystemOptions>) -> Self {
+ pub(crate) fn resolve(
+ args: PythonInstallArgs,
+ filesystem: Option<FilesystemOptions>,
+ environment: EnvironmentOptions,
+ ) -> Self {
let options = filesystem.map(FilesystemOptions::into_options);
let (python_mirror, pypy_mirror, python_downloads_json_url) = match options {
Some(options) => (
@@ -979,8 +983,9 @@ impl PythonInstallSettings {
targets,
reinstall,
force,
- bin: flag(bin, no_bin, "bin"),
- registry: flag(registry, no_registry, "registry"),
+ bin: flag(bin, no_bin, "bin").or(environment.python_install_bin),
+ registry: flag(registry, no_registry, "registry")
+ .or(environment.python_install_registry),
python_install_mirror: python_mirror,
pypy_install_mirror: pypy_mirror,
python_downloads_json_url,
diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs
index 39de4c6f9..d9353f7c3 100644
--- a/crates/uv/tests/it/help.rs
+++ b/crates/uv/tests/it/help.rs
@@ -506,10 +506,14 @@ fn help_subsubcommand() {
[env: UV_PYTHON_INSTALL_DIR=]
--no-bin
- Do not install a Python executable into the `bin` directory
+ Do not install a Python executable into the `bin` directory.
+
+ This can also be set with `UV_PYTHON_INSTALL_BIN=0`.
--no-registry
- Do not register the Python installation in the Windows registry
+ Do not register the Python installation in the Windows registry.
+
+ This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`.
--mirror
Set the URL to use as the source for downloading Python installations.
diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs
index 62b3254b8..50b0b3cf5 100644
--- a/crates/uv/tests/it/python_install.rs
+++ b/crates/uv/tests/it/python_install.rs
@@ -445,6 +445,15 @@ fn python_install_preview() {
exit_code: 1
----- stdout -----
+ ----- stderr -----
+ error: Failed to install executable for cpython-3.13.5-[PLATFORM]
+ Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it
+ ");
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13").env(EnvVars::UV_PYTHON_INSTALL_BIN, "1"), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
----- stderr -----
error: Failed to install executable for cpython-3.13.5-[PLATFORM]
Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it
@@ -456,6 +465,13 @@ fn python_install_preview() {
exit_code: 0
----- stdout -----
+ ----- stderr -----
+ ");
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13").env(EnvVars::UV_PYTHON_INSTALL_BIN, "0"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
----- stderr -----
");
@@ -643,7 +659,7 @@ fn python_install_preview_upgrade() {
.child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
// Install 3.12.5
- uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.5"), @r###"
+ uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.5"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -651,7 +667,7 @@ fn python_install_preview_upgrade() {
----- stderr -----
Installed Python 3.12.5 in [TIME]
+ cpython-3.12.5-[PLATFORM] (python3.12)
- "###);
+ ");
// Installing with a patch version should cause the link to be to the patch installation.
if cfg!(unix) {
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 66c46ae0c..5aea00f32 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -2796,7 +2796,8 @@ uv python install [OPTIONS] [TARGETS]...
May also be set with the UV_PYTHON_INSTALL_MIRROR environment variable.
--native-tlsWhether to load TLS certificates from the platform's native certificate store.
By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).
However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.
-May also be set with the UV_NATIVE_TLS environment variable.
--no-binDo not install a Python executable into the bin directory
+May also be set with the UV_NATIVE_TLS environment variable.
--no-binDo not install a Python executable into the bin directory.
+This can also be set with UV_PYTHON_INSTALL_BIN=0.
--no-cache , --no-cache-dir, -nAvoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation
May also be set with the UV_NO_CACHE environment variable.
--no-configAvoid discovering configuration files (pyproject.toml, uv.toml).
Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.
@@ -2805,7 +2806,8 @@ uv python install [OPTIONS] [TARGETS]...
May also be set with the UV_NO_MANAGED_PYTHON environment variable.
--no-progressHide all progress outputs.
For example, spinners or progress bars.
May also be set with the UV_NO_PROGRESS environment variable.
--no-python-downloadsDisable automatic downloads of Python.
---no-registryDo not register the Python installation in the Windows registry
+--no-registryDo not register the Python installation in the Windows registry.
+This can also be set with UV_PYTHON_INSTALL_REGISTRY=0.
--offlineDisable network access.
When disabled, uv will only use locally cached data and locally available files.
May also be set with the UV_OFFLINE environment variable.
--project project Run the command within the given project directory.
diff --git a/docs/reference/environment.md b/docs/reference/environment.md
index 5f06cfd3f..a64869edb 100644
--- a/docs/reference/environment.md
+++ b/docs/reference/environment.md
@@ -376,6 +376,10 @@ This will allow for setting each property of the Python installation, mostly the
Note that currently, only local paths are supported.
+### `UV_PYTHON_INSTALL_BIN`
+
+Whether to install the Python executable into the `UV_PYTHON_BIN_DIR` directory.
+
### `UV_PYTHON_INSTALL_DIR`
Specifies the directory for storing managed Python installations.
@@ -390,6 +394,10 @@ The provided URL will replace `https://github.com/astral-sh/python-build-standal
`https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.
Distributions can be read from a local directory by using the `file://` URL scheme.
+### `UV_PYTHON_INSTALL_REGISTRY`
+
+Whether to install the Python executable into the Windows registry.
+
### `UV_PYTHON_PREFERENCE`
Whether uv should prefer system or managed Python versions.
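As a rough usage sketch of the options documented above (the flag and variable names come from this patch; the `3.12` request is just an example):

```sh
# Skip the bin-directory executable and the Windows registry entries
# when installing a managed Python, via flags...
uv python install 3.12 --no-bin --no-registry
# ...or via the corresponding environment variables.
UV_PYTHON_INSTALL_BIN=0 UV_PYTHON_INSTALL_REGISTRY=0 uv python install 3.12
```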
From 35e2f67b5e4529050e615f6e812d0d727afc20d3 Mon Sep 17 00:00:00 2001
From: samypr100 <3933065+samypr100@users.noreply.github.com>
Date: Sat, 21 Jun 2025 15:42:28 -0400
Subject: [PATCH 059/130] feat(docker): set default `UV_TOOL_BIN_DIR` on docker
images (#13391)
Closes #13057
Sets `UV_TOOL_BIN_DIR` to `/usr/local/bin` for all derived images to
allow `uv tool install` to work out of the box.
Note: when the default image user is overridden (e.g., via a `USER`
directive) with a less privileged one, downstream consumers will now
need to set an alternative writable location to prevent issues, hence
I'm labeling this as a breaking change for the 0.8.x release.
Relates to https://github.com/astral-sh/uv-docker-example/pull/55
Each image was tested with `uv tool` and `UV_TOOL_BIN_DIR` set to
`/usr/local/bin`, using both the default root user and alternative
non-root users, to confirm the breaking nature of the change.
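For downstream images that switch to a non-root user, a rough mitigation sketch in the style of the workflow snippet below (the base tag, the `app` user, and the `/home/app/.local/bin` path are placeholders, not part of this change):

```sh
cat <<'EOF' > Dockerfile
FROM ghcr.io/astral-sh/uv:alpine
# Assumes an `app` user already exists in the base image.
USER app
# Point `uv tool install` at a directory the non-root user can write to.
ENV UV_TOOL_BIN_DIR="/home/app/.local/bin"
ENV PATH="/home/app/.local/bin:$PATH"
EOF
```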
---
.github/workflows/build-docker.yml | 1 +
docs/guides/integration/docker.md | 3 +++
2 files changed, 4 insertions(+)
diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml
index 843ee8dfb..3c080b63f 100644
--- a/.github/workflows/build-docker.yml
+++ b/.github/workflows/build-docker.yml
@@ -225,6 +225,7 @@ jobs:
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
+ ENV UV_TOOL_BIN_DIR="/usr/local/bin"
ENTRYPOINT []
CMD ["/usr/local/bin/uv"]
EOF
diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md
index 2ea14c9b0..a75228723 100644
--- a/docs/guides/integration/docker.md
+++ b/docs/guides/integration/docker.md
@@ -77,6 +77,9 @@ As with the distroless image, each derived image is published with uv version ta
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`.
+In addition, starting with `0.8` each derived image also sets `UV_TOOL_BIN_DIR` to `/usr/local/bin`
+to allow `uv tool install` to work as expected with the default user.
+
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
page.
From c8925e2541ae451148cdd2d4b12fa7904004dceb Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 26 Jun 2025 12:22:38 -0500
Subject: [PATCH 060/130] Require `--global` for removal of the global Python
pin (#14169)
While reviewing https://github.com/astral-sh/uv/pull/14107, @oconnor663
pointed out a bug where we allow `uv python pin --rm` to delete the
global pin without the `--global` flag. I think that shouldn't be
allowed? I'm not 100% certain though.
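Roughly, the behavior this change is aiming for (output elided):

```sh
# Removes a discovered `.python-version` file, but never the global pin:
uv python pin --rm
# Required to remove the user-level (global) pin:
uv python pin --rm --global
```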
---
Cargo.lock | 1 -
crates/uv-python/src/version_files.rs | 16 +++++++++++++
crates/uv/Cargo.toml | 1 -
crates/uv/src/commands/python/pin.rs | 22 ++++++++++++------
crates/uv/tests/it/python_pin.rs | 33 +++++++++++++++++++++++++--
5 files changed, 62 insertions(+), 11 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 8a95f655d..2963b6374 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4696,7 +4696,6 @@ dependencies = [
"uv-client",
"uv-configuration",
"uv-console",
- "uv-dirs",
"uv-dispatch",
"uv-distribution",
"uv-distribution-filename",
diff --git a/crates/uv-python/src/version_files.rs b/crates/uv-python/src/version_files.rs
index a9cd05b7e..595a18f0f 100644
--- a/crates/uv-python/src/version_files.rs
+++ b/crates/uv-python/src/version_files.rs
@@ -217,6 +217,19 @@ impl PythonVersionFile {
}
}
+ /// Create a new representation of a global Python version file.
+ ///
+ /// Returns [`None`] if the user configuration directory cannot be determined.
+ pub fn global() -> Option<Self> {
+ let path = user_uv_config_dir()?.join(PYTHON_VERSION_FILENAME);
+ Some(Self::new(path))
+ }
+
+ /// Returns `true` if the version file is a global version file.
+ pub fn is_global(&self) -> bool {
+ PythonVersionFile::global().is_some_and(|global| self.path() == global.path())
+ }
+
/// Return the first request declared in the file, if any.
pub fn version(&self) -> Option<&PythonRequest> {
self.versions.first()
@@ -260,6 +273,9 @@ impl PythonVersionFile {
/// Update the version file on the file system.
pub async fn write(&self) -> Result<(), std::io::Error> {
debug!("Writing Python versions to `{}`", self.path.display());
+ if let Some(parent) = self.path.parent() {
+ fs_err::tokio::create_dir_all(parent).await?;
+ }
fs::tokio::write(
&self.path,
self.versions
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index 975495904..ff389f033 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -24,7 +24,6 @@ uv-cli = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-console = { workspace = true }
-uv-dirs = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-filename = { workspace = true }
diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs
index f4d10cdfa..0e78e6b5c 100644
--- a/crates/uv/src/commands/python/pin.rs
+++ b/crates/uv/src/commands/python/pin.rs
@@ -9,7 +9,6 @@ use tracing::debug;
use uv_cache::Cache;
use uv_client::BaseClientBuilder;
use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode};
-use uv_dirs::user_uv_config_dir;
use uv_fs::Simplified;
use uv_python::{
EnvironmentPreference, PYTHON_VERSION_FILENAME, PythonDownloads, PythonInstallation,
@@ -72,10 +71,20 @@ pub(crate) async fn pin(
}
bail!("No Python version file found");
};
+
+ if !global && file.is_global() {
+ bail!("No Python version file found; use `--rm --global` to remove the global pin");
+ }
+
fs_err::tokio::remove_file(file.path()).await?;
writeln!(
printer.stdout(),
- "Removed Python version file at `{}`",
+ "Removed {} at `{}`",
+ if global {
+ "global Python pin"
+ } else {
+ "Python version file"
+ },
file.path().user_display()
)?;
return Ok(ExitStatus::Success);
@@ -194,12 +203,11 @@ pub(crate) async fn pin(
let existing = version_file.ok().flatten();
// TODO(zanieb): Allow updating the discovered version file with an `--update` flag.
let new = if global {
- let Some(config_dir) = user_uv_config_dir() else {
- return Err(anyhow::anyhow!("No user-level config directory found."));
+ let Some(new) = PythonVersionFile::global() else {
+ // TODO(zanieb): We should find a nice way to surface that as an error
+ bail!("Failed to determine directory for global Python pin");
};
- fs_err::tokio::create_dir_all(&config_dir).await?;
- PythonVersionFile::new(config_dir.join(PYTHON_VERSION_FILENAME))
- .with_versions(vec![request])
+ new.with_versions(vec![request])
} else {
PythonVersionFile::new(project_dir.join(PYTHON_VERSION_FILENAME))
.with_versions(vec![request])
diff --git a/crates/uv/tests/it/python_pin.rs b/crates/uv/tests/it/python_pin.rs
index cf8849f42..97093831c 100644
--- a/crates/uv/tests/it/python_pin.rs
+++ b/crates/uv/tests/it/python_pin.rs
@@ -847,7 +847,7 @@ fn python_pin_rm() {
error: No Python version file found
");
- // Remove the local pin
+ // Create and remove a local pin
context.python_pin().arg("3.12").assert().success();
uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
success: true
@@ -884,12 +884,41 @@ fn python_pin_rm() {
.arg("--global")
.assert()
.success();
+
uv_snapshot!(context.filters(), context.python_pin().arg("--rm").arg("--global"), @r"
success: true
exit_code: 0
----- stdout -----
- Removed Python version file at `[UV_USER_CONFIG_DIR]/.python-version`
+ Removed global Python pin at `[UV_USER_CONFIG_DIR]/.python-version`
----- stderr -----
");
+
+ // Add the global pin again
+ context
+ .python_pin()
+ .arg("3.12")
+ .arg("--global")
+ .assert()
+ .success();
+
+ // Remove the local pin
+ uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Removed Python version file at `.python-version`
+
+ ----- stderr -----
+ ");
+
+ // The global pin should not be removed without `--global`
+ uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: No Python version file found; use `--rm --global` to remove the global pin
+ ");
}
From e4c04af32d297693801582999178775b7627f9d8 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 26 Jun 2025 22:45:45 -0400
Subject: [PATCH 061/130] Bump `--python-platform linux` to `manylinux_2_28`
(#14300)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Right now, `--python-platform linux` defaults to `manylinux_2_17`.
Defaulting to `manylinux_2_17` causes some problems for users, since it
means we can't use (e.g.) `manylinux_2_28` wheels, and end up having to
build from source.
cibuildwheel made `manylinux_2_28` their default in
https://github.com/pypa/cibuildwheel/pull/1988, and there's a lot of
discussion in https://github.com/pypa/cibuildwheel/issues/1772 and
https://github.com/pypa/cibuildwheel/issues/2047. In short, the
`manylinux2014` image is EOL, and the vast majority of consumers now
run at least glibc 2.28 (https://mayeut.github.io/manylinux-timeline/).
Note that this only changes the _default_. Users can still compile
against `manylinux_2_17` by specifying it.
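For example, anyone who needs the older baseline can keep targeting it explicitly (mirroring the updated test below):

```sh
# Opt back into the manylinux_2_17 baseline rather than the new default:
uv pip compile requirements.in --python-platform x86_64-manylinux_2_17
```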
---
crates/uv-configuration/src/target_triple.rs | 8 ++++----
crates/uv/tests/it/pip_compile.rs | 2 +-
docs/reference/cli.md | 16 ++++++++--------
uv.schema.json | 4 ++--
4 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/crates/uv-configuration/src/target_triple.rs b/crates/uv-configuration/src/target_triple.rs
index 81499deff..842fb39a7 100644
--- a/crates/uv-configuration/src/target_triple.rs
+++ b/crates/uv-configuration/src/target_triple.rs
@@ -33,7 +33,7 @@ pub enum TargetTriple {
#[serde(rename = "i686-pc-windows-msvc")]
I686PcWindowsMsvc,
- /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.
+ /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`.
#[cfg_attr(feature = "clap", value(name = "x86_64-unknown-linux-gnu"))]
#[serde(rename = "x86_64-unknown-linux-gnu")]
#[serde(alias = "x8664-unknown-linux-gnu")]
@@ -56,7 +56,7 @@ pub enum TargetTriple {
#[serde(alias = "x8664-apple-darwin")]
X8664AppleDarwin,
- /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.
+ /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`.
#[cfg_attr(feature = "clap", value(name = "aarch64-unknown-linux-gnu"))]
#[serde(rename = "aarch64-unknown-linux-gnu")]
Aarch64UnknownLinuxGnu,
@@ -240,7 +240,7 @@ impl TargetTriple {
Self::Linux | Self::X8664UnknownLinuxGnu => Platform::new(
Os::Manylinux {
major: 2,
- minor: 17,
+ minor: 28,
},
Arch::X86_64,
),
@@ -262,7 +262,7 @@ impl TargetTriple {
Self::Aarch64UnknownLinuxGnu => Platform::new(
Os::Manylinux {
major: 2,
- minor: 17,
+ minor: 28,
},
Arch::Aarch64,
),
diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs
index b99be1296..f04c16b86 100644
--- a/crates/uv/tests/it/pip_compile.rs
+++ b/crates/uv/tests/it/pip_compile.rs
@@ -14728,7 +14728,7 @@ fn invalid_platform() -> Result<()> {
uv_snapshot!(context
.pip_compile()
.arg("--python-platform")
- .arg("linux")
+ .arg("x86_64-manylinux_2_17")
.arg("requirements.in"), @r"
success: false
exit_code: 1
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 5aea00f32..aa6213eff 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -1736,10 +1736,10 @@ interpreter. Use --universal to display the tree for all platforms,
macos: An alias for aarch64-apple-darwin, the default target for macOS
x86_64-pc-windows-msvc: A 64-bit x86 Windows target
i686-pc-windows-msvc: A 32-bit x86 Windows target
-x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
x86_64-apple-darwin: An x86 macOS target
-aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
aarch64-unknown-linux-musl: An ARM64 Linux target
x86_64-unknown-linux-musl: An x86_64 Linux target
x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
@@ -3490,10 +3490,10 @@ by --python-version.
macos: An alias for aarch64-apple-darwin, the default target for macOS
x86_64-pc-windows-msvc: A 64-bit x86 Windows target
i686-pc-windows-msvc: A 32-bit x86 Windows target
-x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
x86_64-apple-darwin: An x86 macOS target
-aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
aarch64-unknown-linux-musl: An ARM64 Linux target
x86_64-unknown-linux-musl: An x86_64 Linux target
x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
@@ -3747,10 +3747,10 @@ be used with caution, as it can modify the system Python installation.
macos: An alias for aarch64-apple-darwin, the default target for macOS
x86_64-pc-windows-msvc: A 64-bit x86 Windows target
i686-pc-windows-msvc: A 32-bit x86 Windows target
-x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
x86_64-apple-darwin: An x86 macOS target
-aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
aarch64-unknown-linux-musl: An ARM64 Linux target
x86_64-unknown-linux-musl: An x86_64 Linux target
x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
@@ -4029,10 +4029,10 @@ should be used with caution, as it can modify the system Python installation.
macos: An alias for aarch64-apple-darwin, the default target for macOS
x86_64-pc-windows-msvc: A 64-bit x86 Windows target
i686-pc-windows-msvc: A 32-bit x86 Windows target
-x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
x86_64-apple-darwin: An x86 macOS target
-aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
aarch64-unknown-linux-musl: An ARM64 Linux target
x86_64-unknown-linux-musl: An x86_64 Linux target
x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
diff --git a/uv.schema.json b/uv.schema.json
index e418f37f0..ba89f65f4 100644
--- a/uv.schema.json
+++ b/uv.schema.json
@@ -2114,7 +2114,7 @@
"const": "i686-pc-windows-msvc"
},
{
- "description": "An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.",
+ "description": "An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`.",
"type": "string",
"const": "x86_64-unknown-linux-gnu"
},
@@ -2129,7 +2129,7 @@
"const": "x86_64-apple-darwin"
},
{
- "description": "An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.",
+ "description": "An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`.",
"type": "string",
"const": "aarch64-unknown-linux-gnu"
},
From c3d7d3899c435d528d34f242a3750aeed1bb8c50 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 10 Jul 2025 22:05:49 -0400
Subject: [PATCH 062/130] Default to `--workspace` when adding subdirectories
(#14529)
If `--workspace` is provided, we add all paths as workspace members.
If `--no-workspace` is provided, we add all paths as direct path
dependencies.
If neither is provided, then we add any paths that are under the
workspace root as workspace members, and the rest as direct path
dependencies.
Closes #14524.
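A rough sketch of the resulting defaults (paths are placeholders, matching the new tests below):

```sh
# Inside the workspace root: added to `[tool.uv.workspace] members` by default.
uv add ./packages/child
# Outside the workspace root: added as a direct path dependency.
uv add ../external-dep
# Explicit opt-out, even for a path inside the workspace root:
uv add --no-workspace ./packages/child
```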
---
crates/uv-cli/src/lib.rs | 15 +-
crates/uv/src/commands/project/add.rs | 80 +++--
crates/uv/src/settings.rs | 5 +-
crates/uv/tests/it/edit.rs | 454 +++++++++++++++++++++++++-
docs/reference/cli.md | 10 +-
5 files changed, 522 insertions(+), 42 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 94b79558d..4c01fd780 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -3726,10 +3726,19 @@ pub struct AddArgs {
/// Add the dependency as a workspace member.
///
- /// When used with a path dependency, the package will be added to the workspace's `members`
- /// list in the root `pyproject.toml` file.
- #[arg(long)]
+ /// By default, uv will add path dependencies that are within the workspace directory
+ /// as workspace members. When used with a path dependency, the package will be added
+ /// to the workspace's `members` list in the root `pyproject.toml` file.
+ #[arg(long, overrides_with = "no_workspace")]
pub workspace: bool,
+
+ /// Don't add the dependency as a workspace member.
+ ///
+ /// By default, when adding a dependency that's a local path and is within the workspace
+ /// directory, uv will add it as a workspace member; pass `--no-workspace` to add the package
+ /// as a direct path dependency instead.
+ #[arg(long, overrides_with = "workspace")]
+ pub no_workspace: bool,
}
#[derive(Args)]
diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs
index d65866483..28cc2dcd5 100644
--- a/crates/uv/src/commands/project/add.rs
+++ b/crates/uv/src/commands/project/add.rs
@@ -83,7 +83,7 @@ pub(crate) async fn add(
extras_of_dependency: Vec,
package: Option,
python: Option,
- workspace: bool,
+ workspace: Option<bool>,
install_mirrors: PythonInstallMirrors,
settings: ResolverInstallerSettings,
network_settings: NetworkSettings,
@@ -497,16 +497,41 @@ pub(crate) async fn add(
// Track modification status, for reverts.
let mut modified = false;
- // If `--workspace` is provided, add any members to the `workspace` section of the
+ // Determine whether to use workspace mode.
+ let use_workspace = match workspace {
+ Some(workspace) => workspace,
+ None => {
+ // Check if we're in a project (not a script), and if any requirements are path
+ // dependencies within the workspace.
+ if let AddTarget::Project(ref project, _) = target {
+ let workspace_root = project.workspace().install_path();
+ requirements.iter().any(|req| {
+ if let RequirementSource::Directory { install_path, .. } = &req.source {
+ let absolute_path = if install_path.is_absolute() {
+ install_path.to_path_buf()
+ } else {
+ project.root().join(install_path)
+ };
+ absolute_path.starts_with(workspace_root)
+ } else {
+ false
+ }
+ })
+ } else {
+ false
+ }
+ }
+ };
+
+ // If workspace mode is enabled, add any members to the `workspace` section of the
// `pyproject.toml` file.
- if workspace {
+ if use_workspace {
let AddTarget::Project(project, python_target) = target else {
unreachable!("`--workspace` and `--script` are conflicting options");
};
- let workspace = project.workspace();
let mut toml = PyProjectTomlMut::from_toml(
- &workspace.pyproject_toml().raw,
+ &project.workspace().pyproject_toml().raw,
DependencyTarget::PyProjectToml,
)?;
@@ -519,21 +544,32 @@ pub(crate) async fn add(
project.root().join(install_path)
};
- // Check if the path is not already included in the workspace.
- if !workspace.includes(&absolute_path)? {
- let relative_path = absolute_path
- .strip_prefix(workspace.install_path())
- .unwrap_or(&absolute_path);
-
- toml.add_workspace(relative_path)?;
- modified |= true;
-
- writeln!(
- printer.stderr(),
- "Added `{}` to workspace members",
- relative_path.user_display().cyan()
- )?;
+ // Either `--workspace` was provided explicitly, or it was omitted but the path is
+ // within the workspace root.
+ let use_workspace = workspace.unwrap_or_else(|| {
+ absolute_path.starts_with(project.workspace().install_path())
+ });
+ if !use_workspace {
+ continue;
}
+
+ // If the project is already a member of the workspace, skip it.
+ if project.workspace().includes(&absolute_path)? {
+ continue;
+ }
+
+ let relative_path = absolute_path
+ .strip_prefix(project.workspace().install_path())
+ .unwrap_or(&absolute_path);
+
+ toml.add_workspace(relative_path)?;
+ modified |= true;
+
+ writeln!(
+ printer.stderr(),
+ "Added `{}` to workspace members",
+ relative_path.user_display().cyan()
+ )?;
}
}
@@ -542,7 +578,7 @@ pub(crate) async fn add(
target = if modified {
let workspace_content = toml.to_string();
fs_err::write(
- workspace.install_path().join("pyproject.toml"),
+ project.workspace().install_path().join("pyproject.toml"),
&workspace_content,
)?;
@@ -747,13 +783,13 @@ fn edits(
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.map(ToolUvSources::inner);
- let workspace = project
+ let is_workspace_member = project
.workspace()
.packages()
.contains_key(&requirement.name);
resolve_requirement(
requirement,
- workspace,
+ is_workspace_member,
editable,
index.cloned(),
rev.map(ToString::to_string),
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index b246f228f..bf3bca4a4 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -1351,7 +1351,7 @@ pub(crate) struct AddSettings {
pub(crate) package: Option,
pub(crate) script: Option,
pub(crate) python: Option,
- pub(crate) workspace: bool,
+ pub(crate) workspace: Option<bool>,
pub(crate) install_mirrors: PythonInstallMirrors,
pub(crate) refresh: Refresh,
pub(crate) indexes: Vec,
@@ -1390,6 +1390,7 @@ impl AddSettings {
script,
python,
workspace,
+ no_workspace,
} = args;
let dependency_type = if let Some(extra) = optional {
@@ -1490,7 +1491,7 @@ impl AddSettings {
package,
script,
python: python.and_then(Maybe::into_option),
- workspace,
+ workspace: flag(workspace, no_workspace, "workspace"),
editable: flag(editable, no_editable, "editable"),
extras: extra.unwrap_or_default(),
refresh: Refresh::from(refresh),
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index ddaed434f..ccc0cabf2 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -2491,9 +2491,9 @@ fn add_workspace_path() -> Result<()> {
Ok(())
}
-/// Add a path dependency.
+/// Add a path dependency, which should be implicitly added to the workspace.
#[test]
-fn add_path() -> Result<()> {
+fn add_path_implicit_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
@@ -2533,6 +2533,7 @@ fn add_path() -> Result<()> {
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
+ Added `packages/child` to workspace members
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
@@ -2545,7 +2546,134 @@ fn add_path() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject_toml, @r###"
+ pyproject_toml, @r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = [
+ "child",
+ ]
+
+ [tool.uv.workspace]
+ members = [
+ "packages/child",
+ ]
+
+ [tool.uv.sources]
+ child = { workspace = true }
+ "#
+ );
+ });
+
+ // `uv add` implies a full lock and sync, including development dependencies.
+ let lock = fs_err::read_to_string(workspace.join("uv.lock"))?;
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [manifest]
+ members = [
+ "child",
+ "parent",
+ ]
+
+ [[package]]
+ name = "child"
+ version = "0.1.0"
+ source = { editable = "packages/child" }
+
+ [[package]]
+ name = "parent"
+ version = "0.1.0"
+ source = { virtual = "." }
+ dependencies = [
+ { name = "child" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "child", editable = "packages/child" }]
+ "#
+ );
+ });
+
+ // Install from the lockfile.
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").current_dir(workspace.path()), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Audited 1 package in [TIME]
+ ");
+
+ Ok(())
+}
+
+/// Add a path dependency with `--no-workspace`, which should not be added to the workspace.
+#[test]
+fn add_path_no_workspace() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let workspace = context.temp_dir.child("workspace");
+ workspace.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ "#})?;
+
+ let child = workspace.child("packages").child("child");
+ child.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "child"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ "#})?;
+ workspace
+ .child("packages")
+ .child("child")
+ .child("src")
+ .child("child")
+ .child("__init__.py")
+ .touch()?;
+
+ uv_snapshot!(context.filters(), context.add().arg(Path::new("packages").join("child")).current_dir(workspace.path()).arg("--no-workspace"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ Resolved 2 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + child==0.1.0 (from file://[TEMP_DIR]/workspace/packages/child)
+ ");
+
+ let pyproject_toml = fs_err::read_to_string(workspace.join("pyproject.toml"))?;
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ pyproject_toml, @r#"
[project]
name = "parent"
version = "0.1.0"
@@ -2556,7 +2684,7 @@ fn add_path() -> Result<()> {
[tool.uv.sources]
child = { path = "packages/child" }
- "###
+ "#
);
});
@@ -2607,6 +2735,110 @@ fn add_path() -> Result<()> {
Ok(())
}
+/// Add a path dependency in an adjacent directory, which should not be added to the workspace.
+#[test]
+fn add_path_adjacent_directory() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let project = context.temp_dir.child("project");
+ project.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ "#})?;
+
+ let dependency = context.temp_dir.child("dependency");
+ dependency.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "dependency"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ "#})?;
+ dependency
+ .child("src")
+ .child("dependency")
+ .child("__init__.py")
+ .touch()?;
+
+ uv_snapshot!(context.filters(), context.add().arg(dependency.path()).current_dir(project.path()), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ Resolved 2 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dependency==0.1.0 (from file://[TEMP_DIR]/dependency)
+ ");
+
+ let pyproject_toml = fs_err::read_to_string(project.join("pyproject.toml"))?;
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ pyproject_toml, @r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = [
+ "dependency",
+ ]
+
+ [tool.uv.sources]
+ dependency = { path = "../dependency" }
+ "#
+ );
+ });
+
+ // `uv add` implies a full lock and sync, including development dependencies.
+ let lock = fs_err::read_to_string(project.join("uv.lock"))?;
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [[package]]
+ name = "dependency"
+ version = "0.1.0"
+ source = { directory = "../dependency" }
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+ dependencies = [
+ { name = "dependency" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "dependency", directory = "../dependency" }]
+ "#
+ );
+ });
+
+ Ok(())
+}
+
/// Update a requirement, modifying the source and extras.
#[test]
#[cfg(feature = "git")]
@@ -7249,7 +7481,7 @@ fn fail_to_add_revert_project() -> Result<()> {
.child("setup.py")
.write_str("1/0")?;
- uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
+ uv_snapshot!(context.filters(), context.add().arg("./child").arg("--no-workspace"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7351,7 +7583,7 @@ fn fail_to_edit_revert_project() -> Result<()> {
.child("setup.py")
.write_str("1/0")?;
- uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
+ uv_snapshot!(context.filters(), context.add().arg("./child").arg("--no-workspace"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7460,7 +7692,7 @@ fn fail_to_add_revert_workspace_root() -> Result<()> {
.child("setup.py")
.write_str("1/0")?;
- uv_snapshot!(context.filters(), context.add().arg("--workspace").arg("./broken"), @r#"
+ uv_snapshot!(context.filters(), context.add().arg("./broken"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -7575,7 +7807,7 @@ fn fail_to_add_revert_workspace_member() -> Result<()> {
.child("setup.py")
.write_str("1/0")?;
- uv_snapshot!(context.filters(), context.add().current_dir(&project).arg("--workspace").arg("../broken"), @r#"
+ uv_snapshot!(context.filters(), context.add().current_dir(&project).arg("../broken"), @r#"
success: false
exit_code: 1
----- stdout -----
@@ -12928,12 +13160,12 @@ fn add_path_with_existing_workspace() -> Result<()> {
dependencies = []
"#})?;
- // Add the dependency with `--workspace` flag from the project directory.
+ // Add the dependency from the project directory. It should automatically be added as a
+ // workspace member, since it's within the workspace directory.
uv_snapshot!(context.filters(), context
.add()
.current_dir(&project_dir)
- .arg("../dep")
- .arg("--workspace"), @r"
+ .arg("../dep"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -13044,3 +13276,203 @@ fn add_path_with_workspace() -> Result<()> {
Ok(())
}
+
+/// Add a path dependency within the workspace directory without --workspace flag.
+/// It should automatically be added as a workspace member.
+#[test]
+fn add_path_within_workspace_defaults_to_workspace() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let workspace_toml = context.temp_dir.child("pyproject.toml");
+ workspace_toml.write_str(indoc! {r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = []
+ "#})?;
+
+ let dep_dir = context.temp_dir.child("dep");
+ dep_dir.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "dep"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ "#})?;
+
+ // Add the dependency without the --workspace flag; it should still be added as a workspace member,
+ // since it's within the workspace directory.
+ uv_snapshot!(context.filters(), context
+ .add()
+ .arg("./dep"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Added `dep` to workspace members
+ Resolved 2 packages in [TIME]
+ Audited in [TIME]
+ ");
+
+ let pyproject_toml = context.read("pyproject.toml");
+ assert_snapshot!(
+ pyproject_toml, @r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = [
+ "dep",
+ ]
+
+ [tool.uv.workspace]
+ members = [
+ "dep",
+ ]
+
+ [tool.uv.sources]
+ dep = { workspace = true }
+ "#
+ );
+
+ Ok(())
+}
+
+/// Add a path dependency within the workspace directory with --no-workspace flag.
+/// It should be added as a direct path dependency.
+#[test]
+fn add_path_with_no_workspace() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let workspace_toml = context.temp_dir.child("pyproject.toml");
+ workspace_toml.write_str(indoc! {r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = []
+ "#})?;
+
+ let dep_dir = context.temp_dir.child("dep");
+ dep_dir.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "dep"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ "#})?;
+
+ // Add the dependency with the --no-workspace flag; it should be added as a direct path dependency.
+ uv_snapshot!(context.filters(), context
+ .add()
+ .arg("./dep")
+ .arg("--no-workspace"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ Audited in [TIME]
+ ");
+
+ let pyproject_toml = context.read("pyproject.toml");
+ assert_snapshot!(
+ pyproject_toml, @r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = [
+ "dep",
+ ]
+
+ [tool.uv.workspace]
+ members = []
+
+ [tool.uv.sources]
+ dep = { path = "dep" }
+ "#
+ );
+
+ Ok(())
+}
+
+/// Add a path dependency outside the workspace directory.
+/// It should be added as a direct path dependency, not a workspace member.
+#[test]
+fn add_path_outside_workspace_no_default() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ // Create a workspace directory
+ let workspace_dir = context.temp_dir.child("workspace");
+ workspace_dir.create_dir_all()?;
+
+ let workspace_toml = workspace_dir.child("pyproject.toml");
+ workspace_toml.write_str(indoc! {r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = []
+ "#})?;
+
+ // Create a dependency outside the workspace
+ let dep_dir = context.temp_dir.child("external_dep");
+ dep_dir.child("pyproject.toml").write_str(indoc! {r#"
+ [project]
+ name = "dep"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ "#})?;
+
+ // Add the dependency without the --workspace flag; it should be a direct path dependency,
+ // since it's outside the workspace directory.
+ uv_snapshot!(context.filters(), context
+ .add()
+ .current_dir(&workspace_dir)
+ .arg("../external_dep"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ Resolved 2 packages in [TIME]
+ Audited in [TIME]
+ ");
+
+ let pyproject_toml = fs_err::read_to_string(workspace_toml)?;
+ assert_snapshot!(
+ pyproject_toml, @r#"
+ [project]
+ name = "parent"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = [
+ "dep",
+ ]
+
+ [tool.uv.workspace]
+ members = []
+
+ [tool.uv.sources]
+ dep = { path = "../external_dep" }
+ "#
+ );
+
+ Ok(())
+}
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index aa6213eff..881c96697 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -535,7 +535,9 @@ uv add [OPTIONS] >
May also be set with the UV_NO_PROGRESS environment variable.
--no-python-downloadsDisable automatic downloads of Python.
--no-sourcesIgnore the tool.uv.sources table when resolving dependencies. Used to lock against the standards-compliant, publishable package metadata, as opposed to using any workspace, Git, URL, or local path sources
--no-syncAvoid syncing the virtual environment
-May also be set with the UV_NO_SYNC environment variable.
--offlineDisable network access.
+May also be set with the UV_NO_SYNC environment variable.
--no-workspaceDon't add the dependency as a workspace member.
+By default, when adding a dependency that's a local path and is within the workspace directory, uv will add it as a workspace member; pass --no-workspace to add the package as direct path dependency instead.
+--offlineDisable network access.
When disabled, uv will only use locally cached data and locally available files.
May also be set with the UV_OFFLINE environment variable.
--optional optional Add the requirements to the package's optional dependencies for the specified extra.
The group may then be activated when installing the project with the --extra flag.
@@ -583,7 +585,7 @@ uv add [OPTIONS] >
--verbose , -vUse verbose output.
You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives )
--workspaceAdd the dependency as a workspace member.
-When used with a path dependency, the package will be added to the workspace's members list in the root pyproject.toml file.
+By default, uv will add path dependencies that are within the workspace directory as workspace members. When used with a path dependency, the package will be added to the workspace's members list in the root pyproject.toml file.
## uv remove
@@ -1154,10 +1156,10 @@ environment in the project.
macos: An alias for aarch64-apple-darwin, the default target for macOS
x86_64-pc-windows-msvc: A 64-bit x86 Windows target
i686-pc-windows-msvc: A 32-bit x86 Windows target
-x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
+x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
x86_64-apple-darwin: An x86 macOS target
-aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
+aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
aarch64-unknown-linux-musl: An ARM64 Linux target
x86_64-unknown-linux-musl: An x86_64 Linux target
x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
From dff9ced40ab2633d32f7e9bcdcb6484500caf621 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 10 Jul 2025 22:20:01 -0400
Subject: [PATCH 063/130] Support conflicting editable settings across groups
(#14197)
If a user specifies `-e /path/to/dir` and `/path/to/dir` in a `uv pip
install` command, we want the editable to "win" (rather than erroring
due to conflicting URLs). Unfortunately, this behavior meant that when
you requested a package as editable and non-editable in conflicting
groups, the editable version was _always_ used. This PR modifies the
requisite types to use `Option<bool>` rather than `bool` for the
`editable` field, so we can determine whether a requirement was
explicitly requested as editable, explicitly requested as non-editable,
or not specified (as in the case of `/path/to/dir` in a
`requirements.txt` file). In the latter case, we allow editables to
override the "unspecified" requirement.
If a project includes a path dependency twice, once with `editable =
true` and once without any `editable` annotation, those are now
considered conflicting URLs, and lead to an error, so I've marked this
change as breaking.
Closes https://github.com/astral-sh/uv/issues/14139.
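As a quick illustration of the precedence described above (`./pkg` is a placeholder path):

```sh
# The same directory requested as both editable and unspecified still
# resolves to the editable form:
uv pip install -e ./pkg ./pkg
# But the same path source declared once with `editable = true` and once
# without an `editable` key now conflicts and errors, rather than silently
# preferring the editable.
```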
---
crates/uv-distribution-types/src/buildable.rs | 7 +-
crates/uv-distribution-types/src/lib.rs | 12 +-
.../uv-distribution-types/src/requirement.rs | 32 +-
.../src/index/built_wheel_index.rs | 2 +-
.../uv-distribution/src/metadata/lowering.rs | 20 +-
crates/uv-distribution/src/source/mod.rs | 4 +-
crates/uv-installer/src/satisfies.rs | 2 +-
crates/uv-pypi-types/src/parsed_url.rs | 29 +-
crates/uv-requirements-txt/src/lib.rs | 6 +-
crates/uv-requirements-txt/src/requirement.rs | 4 +-
...ts_txt__test__parse-unix-bare-url.txt.snap | 24 +-
...ts_txt__test__parse-unix-editable.txt.snap | 48 ++-
...txt__test__parse-windows-bare-url.txt.snap | 24 +-
...txt__test__parse-windows-editable.txt.snap | 48 ++-
crates/uv-requirements/src/source_tree.rs | 2 +-
.../src/lock/export/pylock_toml.rs | 8 +-
crates/uv-resolver/src/lock/mod.rs | 20 +-
crates/uv-resolver/src/resolver/mod.rs | 1 +
crates/uv-resolver/src/resolver/urls.rs | 9 +-
crates/uv-workspace/src/workspace.rs | 16 +-
crates/uv/src/commands/project/sync.rs | 10 +-
crates/uv/tests/it/lock.rs | 86 +----
crates/uv/tests/it/sync.rs | 332 ++++++++++++++++++
23 files changed, 530 insertions(+), 216 deletions(-)
diff --git a/crates/uv-distribution-types/src/buildable.rs b/crates/uv-distribution-types/src/buildable.rs
index c97bb362f..75997e406 100644
--- a/crates/uv-distribution-types/src/buildable.rs
+++ b/crates/uv-distribution-types/src/buildable.rs
@@ -124,7 +124,10 @@ impl SourceUrl<'_> {
pub fn is_editable(&self) -> bool {
matches!(
self,
- Self::Directory(DirectorySourceUrl { editable: true, .. })
+ Self::Directory(DirectorySourceUrl {
+ editable: Some(true),
+ ..
+ })
)
}
@@ -210,7 +213,7 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> {
pub struct DirectorySourceUrl<'a> {
pub url: &'a DisplaySafeUrl,
pub install_path: Cow<'a, Path>,
- pub editable: bool,
+ pub editable: Option<bool>,
}
impl std::fmt::Display for DirectorySourceUrl<'_> {
diff --git a/crates/uv-distribution-types/src/lib.rs b/crates/uv-distribution-types/src/lib.rs
index 1e3ad7eba..0b25669b0 100644
--- a/crates/uv-distribution-types/src/lib.rs
+++ b/crates/uv-distribution-types/src/lib.rs
@@ -343,9 +343,9 @@ pub struct DirectorySourceDist {
/// The absolute path to the distribution which we use for installing.
pub install_path: Box,
/// Whether the package should be installed in editable mode.
- pub editable: bool,
+ pub editable: Option<bool>,
/// Whether the package should be built and installed.
- pub r#virtual: bool,
+ pub r#virtual: Option<bool>,
/// The URL as it was provided by the user.
pub url: VerbatimUrl,
}
@@ -452,8 +452,8 @@ impl Dist {
name: PackageName,
url: VerbatimUrl,
install_path: &Path,
- editable: bool,
- r#virtual: bool,
+ editable: Option<bool>,
+ r#virtual: Option<bool>,
) -> Result {
// Convert to an absolute path.
let install_path = path::absolute(install_path)?;
@@ -655,7 +655,7 @@ impl SourceDist {
/// Returns `true` if the distribution is editable.
pub fn is_editable(&self) -> bool {
match self {
- Self::Directory(DirectorySourceDist { editable, .. }) => *editable,
+ Self::Directory(DirectorySourceDist { editable, .. }) => editable.unwrap_or(false),
_ => false,
}
}
@@ -663,7 +663,7 @@ impl SourceDist {
/// Returns `true` if the distribution is virtual.
pub fn is_virtual(&self) -> bool {
match self {
- Self::Directory(DirectorySourceDist { r#virtual, .. }) => *r#virtual,
+ Self::Directory(DirectorySourceDist { r#virtual, .. }) => r#virtual.unwrap_or(false),
_ => false,
}
}
diff --git a/crates/uv-distribution-types/src/requirement.rs b/crates/uv-distribution-types/src/requirement.rs
index 432cc4e12..104cf396c 100644
--- a/crates/uv-distribution-types/src/requirement.rs
+++ b/crates/uv-distribution-types/src/requirement.rs
@@ -429,9 +429,9 @@ pub enum RequirementSource {
/// The absolute path to the distribution which we use for installing.
install_path: Box,
/// For a source tree (a directory), whether to install as an editable.
- editable: bool,
+ editable: Option<bool>,
/// For a source tree (a directory), whether the project should be built and installed.
- r#virtual: bool,
+ r#virtual: Option<bool>,
/// The PEP 508 style URL in the format
/// `file:///#subdirectory=`.
url: VerbatimUrl,
@@ -545,7 +545,13 @@ impl RequirementSource {
/// Returns `true` if the source is editable.
pub fn is_editable(&self) -> bool {
- matches!(self, Self::Directory { editable: true, .. })
+ matches!(
+ self,
+ Self::Directory {
+ editable: Some(true),
+ ..
+ }
+ )
}
/// Returns `true` if the source is empty.
@@ -792,11 +798,11 @@ impl From for RequirementSourceWire {
r#virtual,
url: _,
} => {
- if editable {
+ if editable.unwrap_or(false) {
Self::Editable {
editable: PortablePathBuf::from(install_path),
}
- } else if r#virtual {
+ } else if r#virtual.unwrap_or(false) {
Self::Virtual {
r#virtual: PortablePathBuf::from(install_path),
}
@@ -908,8 +914,8 @@ impl TryFrom for RequirementSource {
))?;
Ok(Self::Directory {
install_path: directory,
- editable: false,
- r#virtual: false,
+ editable: Some(false),
+ r#virtual: Some(false),
url,
})
}
@@ -920,8 +926,8 @@ impl TryFrom for RequirementSource {
))?;
Ok(Self::Directory {
install_path: editable,
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual: Some(false),
url,
})
}
@@ -932,8 +938,8 @@ impl TryFrom for RequirementSource {
))?;
Ok(Self::Directory {
install_path: r#virtual,
- editable: false,
- r#virtual: true,
+ editable: Some(false),
+ r#virtual: Some(true),
url,
})
}
@@ -980,8 +986,8 @@ mod tests {
marker: MarkerTree::TRUE,
source: RequirementSource::Directory {
install_path: PathBuf::from(path).into_boxed_path(),
- editable: false,
- r#virtual: false,
+ editable: Some(false),
+ r#virtual: Some(false),
url: VerbatimUrl::from_absolute_path(path).unwrap(),
},
origin: None,
diff --git a/crates/uv-distribution/src/index/built_wheel_index.rs b/crates/uv-distribution/src/index/built_wheel_index.rs
index fb376d1b4..9752e7e4f 100644
--- a/crates/uv-distribution/src/index/built_wheel_index.rs
+++ b/crates/uv-distribution/src/index/built_wheel_index.rs
@@ -119,7 +119,7 @@ impl<'a> BuiltWheelIndex<'a> {
) -> Result, Error> {
let cache_shard = self.cache.shard(
CacheBucket::SourceDistributions,
- if source_dist.editable {
+ if source_dist.editable.unwrap_or(false) {
WheelCache::Editable(&source_dist.url).root()
} else {
WheelCache::Path(&source_dist.url).root()
diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs
index 330075842..54782c083 100644
--- a/crates/uv-distribution/src/metadata/lowering.rs
+++ b/crates/uv-distribution/src/metadata/lowering.rs
@@ -310,15 +310,15 @@ impl LoweredRequirement {
RequirementSource::Directory {
install_path: install_path.into_boxed_path(),
url,
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual: Some(false),
}
} else {
RequirementSource::Directory {
install_path: install_path.into_boxed_path(),
url,
- editable: false,
- r#virtual: true,
+ editable: Some(false),
+ r#virtual: Some(true),
}
};
(source, marker)
@@ -724,8 +724,8 @@ fn path_source(
Ok(RequirementSource::Directory {
install_path: install_path.into_boxed_path(),
url,
- editable: true,
- r#virtual: false,
+ editable,
+ r#virtual: Some(false),
})
} else {
// Determine whether the project is a package or virtual.
@@ -738,12 +738,14 @@ fn path_source(
.unwrap_or(true)
});
+ // If the project is not a package, treat it as a virtual dependency.
+ let r#virtual = !is_package;
+
Ok(RequirementSource::Directory {
install_path: install_path.into_boxed_path(),
url,
- editable: false,
- // If a project is not a package, treat it as a virtual dependency.
- r#virtual: !is_package,
+ editable: Some(false),
+ r#virtual: Some(r#virtual),
})
}
} else {
diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs
index 92d83e6ce..1308e3d77 100644
--- a/crates/uv-distribution/src/source/mod.rs
+++ b/crates/uv-distribution/src/source/mod.rs
@@ -1060,7 +1060,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let cache_shard = self.build_context.cache().shard(
CacheBucket::SourceDistributions,
- if resource.editable {
+ if resource.editable.unwrap_or(false) {
WheelCache::Editable(resource.url).root()
} else {
WheelCache::Path(resource.url).root()
@@ -1173,7 +1173,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let cache_shard = self.build_context.cache().shard(
CacheBucket::SourceDistributions,
- if resource.editable {
+ if resource.editable.unwrap_or(false) {
WheelCache::Editable(resource.url).root()
} else {
WheelCache::Path(resource.url).root()
diff --git a/crates/uv-installer/src/satisfies.rs b/crates/uv-installer/src/satisfies.rs
index a91676595..b7e824202 100644
--- a/crates/uv-installer/src/satisfies.rs
+++ b/crates/uv-installer/src/satisfies.rs
@@ -241,7 +241,7 @@ impl RequirementSatisfaction {
return Self::Mismatch;
};
- if *requested_editable != installed_editable.unwrap_or_default() {
+ if requested_editable != installed_editable {
trace!(
"Editable mismatch: {:?} vs. {:?}",
*requested_editable,
diff --git a/crates/uv-pypi-types/src/parsed_url.rs b/crates/uv-pypi-types/src/parsed_url.rs
index 9517dfdc6..57afbcdf9 100644
--- a/crates/uv-pypi-types/src/parsed_url.rs
+++ b/crates/uv-pypi-types/src/parsed_url.rs
@@ -86,8 +86,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl {
ParsedUrl::Directory(ParsedDirectoryUrl {
url,
install_path,
- editable: false,
- r#virtual: false,
+ editable: None,
+ r#virtual: None,
})
} else {
ParsedUrl::Path(ParsedPathUrl {
@@ -118,8 +118,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl {
ParsedUrl::Directory(ParsedDirectoryUrl {
url,
install_path,
- editable: false,
- r#virtual: false,
+ editable: None,
+ r#virtual: None,
})
} else {
ParsedUrl::Path(ParsedPathUrl {
@@ -187,7 +187,10 @@ impl ParsedUrl {
pub fn is_editable(&self) -> bool {
matches!(
self,
- Self::Directory(ParsedDirectoryUrl { editable: true, .. })
+ Self::Directory(ParsedDirectoryUrl {
+ editable: Some(true),
+ ..
+ })
)
}
}
@@ -226,16 +229,18 @@ pub struct ParsedDirectoryUrl {
pub url: DisplaySafeUrl,
/// The absolute path to the distribution which we use for installing.
pub install_path: Box,
- pub editable: bool,
- pub r#virtual: bool,
+ /// Whether the project at the given URL should be installed in editable mode.
+ pub editable: Option<bool>,
+ /// Whether the project at the given URL should be treated as a virtual package.
+ pub r#virtual: Option<bool>,
}
impl ParsedDirectoryUrl {
/// Construct a [`ParsedDirectoryUrl`] from a path requirement source.
pub fn from_source(
install_path: Box,
- editable: bool,
- r#virtual: bool,
+ editable: Option<bool>,
+ r#virtual: Option<bool>,
url: DisplaySafeUrl,
) -> Self {
Self {
@@ -399,8 +404,8 @@ impl TryFrom for ParsedUrl {
Ok(Self::Directory(ParsedDirectoryUrl {
url,
install_path: path.into_boxed_path(),
- editable: false,
- r#virtual: false,
+ editable: None,
+ r#virtual: None,
}))
} else {
Ok(Self::Path(ParsedPathUrl {
@@ -445,7 +450,7 @@ impl From<&ParsedDirectoryUrl> for DirectUrl {
Self::LocalDirectory {
url: value.url.to_string(),
dir_info: DirInfo {
- editable: value.editable.then_some(true),
+ editable: value.editable,
},
subdirectory: None,
}
diff --git a/crates/uv-requirements-txt/src/lib.rs b/crates/uv-requirements-txt/src/lib.rs
index b734bf8a2..b95875768 100644
--- a/crates/uv-requirements-txt/src/lib.rs
+++ b/crates/uv-requirements-txt/src/lib.rs
@@ -2064,8 +2064,10 @@ mod test {
fragment: None,
},
install_path: "/foo/bar",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
diff --git a/crates/uv-requirements-txt/src/requirement.rs b/crates/uv-requirements-txt/src/requirement.rs
index 285753ed8..6c7cf0b52 100644
--- a/crates/uv-requirements-txt/src/requirement.rs
+++ b/crates/uv-requirements-txt/src/requirement.rs
@@ -90,7 +90,7 @@ impl RequirementsTxtRequirement {
version_or_url: Some(uv_pep508::VersionOrUrl::Url(VerbatimParsedUrl {
verbatim: url.verbatim,
parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl {
- editable: true,
+ editable: Some(true),
..parsed_url
}),
})),
@@ -115,7 +115,7 @@ impl RequirementsTxtRequirement {
url: VerbatimParsedUrl {
verbatim: requirement.url.verbatim,
parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl {
- editable: true,
+ editable: Some(true),
..parsed_url
}),
},
diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap
index f2187a1a2..dd03d09bf 100644
--- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap
+++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap
@@ -22,8 +22,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -72,8 +72,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -126,8 +126,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -176,8 +176,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -226,8 +226,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -276,8 +276,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap
index 222ab6b10..39a4885dc 100644
--- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap
+++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap
@@ -24,8 +24,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -81,8 +83,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -138,8 +142,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -195,8 +201,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -252,8 +260,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -302,8 +312,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable[d",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -352,8 +364,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -402,8 +416,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap
index 72e1c8635..be90c5c44 100644
--- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap
+++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap
@@ -22,8 +22,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -72,8 +72,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -126,8 +126,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black_editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -176,8 +176,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -226,8 +226,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -276,8 +276,8 @@ RequirementsTxt {
fragment: None,
},
install_path: "/scripts/packages/black editable",
- editable: false,
- virtual: false,
+ editable: None,
+ virtual: None,
},
),
verbatim: VerbatimUrl {
diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap
index 84ae22816..dde16b40c 100644
--- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap
+++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap
@@ -24,8 +24,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -81,8 +83,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -138,8 +142,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -195,8 +201,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -252,8 +260,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -302,8 +312,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable[d",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -352,8 +364,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
@@ -402,8 +416,10 @@ RequirementsTxt {
fragment: None,
},
install_path: "/editable",
- editable: true,
- virtual: false,
+ editable: Some(
+ true,
+ ),
+ virtual: None,
},
),
verbatim: VerbatimUrl {
diff --git a/crates/uv-requirements/src/source_tree.rs b/crates/uv-requirements/src/source_tree.rs
index 39fbe453b..a7a99c5a2 100644
--- a/crates/uv-requirements/src/source_tree.rs
+++ b/crates/uv-requirements/src/source_tree.rs
@@ -154,7 +154,7 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
let source = SourceUrl::Directory(DirectorySourceUrl {
url: &url,
install_path: Cow::Borrowed(source_tree),
- editable: false,
+ editable: None,
});
// Determine the hash policy. Since we don't have a package name, we perform a
diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs
index 8a53fd8f7..80cd54be2 100644
--- a/crates/uv-resolver/src/lock/export/pylock_toml.rs
+++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs
@@ -500,7 +500,7 @@ impl<'lock> PylockToml {
.unwrap_or_else(|_| dist.install_path.clone());
package.directory = Some(PylockTomlDirectory {
path: PortablePathBuf::from(path),
- editable: if dist.editable { Some(true) } else { None },
+ editable: dist.editable,
subdirectory: None,
});
}
@@ -737,7 +737,7 @@ impl<'lock> PylockToml {
),
editable: match editable {
EditableMode::NonEditable => None,
- EditableMode::Editable => Some(sdist.editable),
+ EditableMode::Editable => sdist.editable,
},
subdirectory: None,
}),
@@ -1394,8 +1394,8 @@ impl PylockTomlDirectory {
Ok(DirectorySourceDist {
name: name.clone(),
install_path: path.into_boxed_path(),
- editable: self.editable.unwrap_or(false),
- r#virtual: false,
+ editable: self.editable,
+ r#virtual: Some(false),
url,
})
}
diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs
index 7ca100fd8..7cbac67df 100644
--- a/crates/uv-resolver/src/lock/mod.rs
+++ b/crates/uv-resolver/src/lock/mod.rs
@@ -2396,8 +2396,8 @@ impl Package {
name: self.id.name.clone(),
url: verbatim_url(&install_path, &self.id)?,
install_path: install_path.into_boxed_path(),
- editable: false,
- r#virtual: false,
+ editable: Some(false),
+ r#virtual: Some(false),
};
uv_distribution_types::SourceDist::Directory(dir_dist)
}
@@ -2407,8 +2407,8 @@ impl Package {
name: self.id.name.clone(),
url: verbatim_url(&install_path, &self.id)?,
install_path: install_path.into_boxed_path(),
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual: Some(false),
};
uv_distribution_types::SourceDist::Directory(dir_dist)
}
@@ -2418,8 +2418,8 @@ impl Package {
name: self.id.name.clone(),
url: verbatim_url(&install_path, &self.id)?,
install_path: install_path.into_boxed_path(),
- editable: false,
- r#virtual: true,
+ editable: Some(false),
+ r#virtual: Some(true),
};
uv_distribution_types::SourceDist::Directory(dir_dist)
}
@@ -3250,9 +3250,9 @@ impl Source {
let path = relative_to(&directory_dist.install_path, root)
.or_else(|_| std::path::absolute(&directory_dist.install_path))
.map_err(LockErrorKind::DistributionRelativePath)?;
- if directory_dist.editable {
+ if directory_dist.editable.unwrap_or(false) {
Ok(Source::Editable(path.into_boxed_path()))
- } else if directory_dist.r#virtual {
+ } else if directory_dist.r#virtual.unwrap_or(false) {
Ok(Source::Virtual(path.into_boxed_path()))
} else {
Ok(Source::Directory(path.into_boxed_path()))
@@ -4800,8 +4800,8 @@ fn normalize_requirement(
marker: requires_python.simplify_markers(requirement.marker),
source: RequirementSource::Directory {
install_path,
- editable,
- r#virtual,
+ editable: Some(editable.unwrap_or(false)),
+ r#virtual: Some(r#virtual.unwrap_or(false)),
url,
},
origin: None,
diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs
index 32d684f04..c30c4e947 100644
--- a/crates/uv-resolver/src/resolver/mod.rs
+++ b/crates/uv-resolver/src/resolver/mod.rs
@@ -620,6 +620,7 @@ impl ResolverState {
// Then here, if we get a reason that we consider unrecoverable, we should
diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs
index 73d190b4a..57803ed0b 100644
--- a/crates/uv-resolver/src/resolver/urls.rs
+++ b/crates/uv-resolver/src/resolver/urls.rs
@@ -63,9 +63,9 @@ impl Urls {
verbatim: _,
} = package_url
{
- if !*editable {
+ if editable.is_none() {
debug!("Allowing an editable variant of {}", &package_url.verbatim);
- *editable = true;
+ *editable = Some(true);
}
}
}
@@ -201,8 +201,9 @@ fn same_resource(a: &ParsedUrl, b: &ParsedUrl, git: &GitResolver) -> bool {
|| is_same_file(&a.install_path, &b.install_path).unwrap_or(false)
}
(ParsedUrl::Directory(a), ParsedUrl::Directory(b)) => {
- a.install_path == b.install_path
- || is_same_file(&a.install_path, &b.install_path).unwrap_or(false)
+ (a.install_path == b.install_path
+ || is_same_file(&a.install_path, &b.install_path).unwrap_or(false))
+ && a.editable.is_none_or(|a| b.editable.is_none_or(|b| a == b))
}
_ => false,
}
diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs
index 1349d739c..8d09554d9 100644
--- a/crates/uv-workspace/src/workspace.rs
+++ b/crates/uv-workspace/src/workspace.rs
@@ -315,15 +315,15 @@ impl Workspace {
source: if member.pyproject_toml.is_package() {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual: Some(false),
url,
}
} else {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
- editable: false,
- r#virtual: true,
+ editable: Some(false),
+ r#virtual: Some(true),
url,
}
},
@@ -371,15 +371,15 @@ impl Workspace {
source: if member.pyproject_toml.is_package() {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual: Some(false),
url,
}
} else {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
- editable: false,
- r#virtual: true,
+ editable: Some(false),
+ r#virtual: Some(true),
url,
}
},
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 94586004f..5843df6be 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -747,7 +747,7 @@ fn apply_no_virtual_project(resolution: Resolution) -> Resolution {
return true;
};
- !dist.r#virtual
+ !dist.r#virtual.unwrap_or(false)
})
}
@@ -765,8 +765,8 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu
let Dist::Source(SourceDist::Directory(DirectorySourceDist {
name,
install_path,
- editable: true,
- r#virtual: false,
+ editable: Some(true),
+ r#virtual,
url,
})) = dist.as_ref()
else {
@@ -777,8 +777,8 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu
dist: Arc::new(Dist::Source(SourceDist::Directory(DirectorySourceDist {
name: name.clone(),
install_path: install_path.clone(),
- editable: false,
- r#virtual: false,
+ editable: Some(false),
+ r#virtual: *r#virtual,
url: url.clone(),
}))),
version: version.clone(),
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index f91870762..477b4b039 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -10946,7 +10946,7 @@ fn lock_sources_source_tree() -> Result<()> {
}
/// Lock a project in which a given dependency is requested from two different members, once as
-/// editable, and once as non-editable.
+/// editable, and once as non-editable. This should trigger a conflicting URL error.
#[test]
fn lock_editable() -> Result<()> {
let context = TestContext::new("3.12");
@@ -11086,86 +11086,16 @@ fn lock_editable() -> Result<()> {
library = { path = "../../library", editable = true }
"#})?;
- uv_snapshot!(context.filters(), context.lock(), @r###"
- success: true
- exit_code: 0
+ uv_snapshot!(context.filters(), context.lock(), @r"
+ success: false
+ exit_code: 2
----- stdout -----
----- stderr -----
- Resolved 3 packages in [TIME]
- "###);
-
- let lock = context.read("uv.lock");
-
- insta::with_settings!({
- filters => context.filters(),
- }, {
- assert_snapshot!(
- lock, @r#"
- version = 1
- revision = 2
- requires-python = ">=3.12"
-
- [options]
- exclude-newer = "2024-03-25T00:00:00Z"
-
- [manifest]
- members = [
- "leaf",
- "workspace",
- ]
-
- [[package]]
- name = "leaf"
- version = "0.1.0"
- source = { editable = "packages/leaf" }
- dependencies = [
- { name = "library" },
- ]
-
- [package.metadata]
- requires-dist = [{ name = "library", editable = "library" }]
-
- [[package]]
- name = "library"
- version = "0.1.0"
- source = { editable = "library" }
-
- [[package]]
- name = "workspace"
- version = "0.1.0"
- source = { virtual = "." }
- dependencies = [
- { name = "library" },
- ]
-
- [package.metadata]
- requires-dist = [{ name = "library", directory = "library" }]
- "#
- );
- });
-
- // Re-run with `--locked`.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 3 packages in [TIME]
- "###);
-
- // Install from the lockfile.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Prepared 1 package in [TIME]
- Installed 1 package in [TIME]
- + library==0.1.0 (from file://[TEMP_DIR]/library)
- "###);
+ error: Requirements contain conflicting URLs for package `library` in all marker environments:
+ - file://[TEMP_DIR]/library
+ - file://[TEMP_DIR]/library (editable)
+ ");
Ok(())
}
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 9fecd50b0..0165cc7f6 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -10471,3 +10471,335 @@ fn sync_python_platform() -> Result<()> {
Ok(())
}
+
+/// See:
+#[test]
+#[cfg(not(windows))]
+fn conflicting_editable() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ [dependency-groups]
+ foo = [
+ "child",
+ ]
+ bar = [
+ "child",
+ ]
+ [tool.uv]
+ conflicts = [
+ [
+ { group = "foo" },
+ { group = "bar" },
+ ],
+ ]
+ [tool.uv.sources]
+ child = [
+ { path = "./child", editable = true, group = "foo" },
+ { path = "./child", editable = false, group = "bar" },
+ ]
+ "#,
+ )?;
+
+ context
+ .temp_dir
+ .child("child")
+ .child("pyproject.toml")
+ .write_str(
+ r#"
+ [project]
+ name = "child"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ "#,
+ )?;
+ context
+ .temp_dir
+ .child("child")
+ .child("src")
+ .child("child")
+ .child("__init__.py")
+ .touch()?;
+
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Audited in [TIME]
+ ");
+
+ let lock = context.read("uv.lock");
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+ conflicts = [[
+ { package = "project", group = "bar" },
+ { package = "project", group = "foo" },
+ ]]
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [[package]]
+ name = "child"
+ version = "0.1.0"
+ source = { directory = "child" }
+
+ [[package]]
+ name = "child"
+ version = "0.1.0"
+ source = { editable = "child" }
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+
+ [package.dev-dependencies]
+ bar = [
+ { name = "child", version = "0.1.0", source = { directory = "child" } },
+ ]
+ foo = [
+ { name = "child", version = "0.1.0", source = { editable = "child" } },
+ ]
+
+ [package.metadata]
+
+ [package.metadata.requires-dev]
+ bar = [{ name = "child", directory = "child" }]
+ foo = [{ name = "child", editable = "child" }]
+ "#
+ );
+ });
+
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + child==0.1.0 (from file://[TEMP_DIR]/child)
+ ");
+
+ uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [{"name":"child","version":"0.1.0","editable_project_location":"[TEMP_DIR]/child"}]
+
+ ----- stderr -----
+ "#);
+
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Uninstalled 1 package in [TIME]
+ Installed 1 package in [TIME]
+ ~ child==0.1.0 (from file://[TEMP_DIR]/child)
+ ");
+
+ uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [{"name":"child","version":"0.1.0"}]
+
+ ----- stderr -----
+ "#);
+
+ Ok(())
+}
+
+/// See:
+#[test]
+#[cfg(not(windows))]
+fn undeclared_editable() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ [dependency-groups]
+ foo = [
+ "child",
+ ]
+ bar = [
+ "child",
+ ]
+ [tool.uv]
+ conflicts = [
+ [
+ { group = "foo" },
+ { group = "bar" },
+ ],
+ ]
+ [tool.uv.sources]
+ child = [
+ { path = "./child", editable = true, group = "foo" },
+ { path = "./child", group = "bar" },
+ ]
+ "#,
+ )?;
+
+ context
+ .temp_dir
+ .child("child")
+ .child("pyproject.toml")
+ .write_str(
+ r#"
+ [project]
+ name = "child"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ "#,
+ )?;
+ context
+ .temp_dir
+ .child("child")
+ .child("src")
+ .child("child")
+ .child("__init__.py")
+ .touch()?;
+
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Audited in [TIME]
+ ");
+
+ let lock = context.read("uv.lock");
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+ conflicts = [[
+ { package = "project", group = "bar" },
+ { package = "project", group = "foo" },
+ ]]
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [[package]]
+ name = "child"
+ version = "0.1.0"
+ source = { directory = "child" }
+
+ [[package]]
+ name = "child"
+ version = "0.1.0"
+ source = { editable = "child" }
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+
+ [package.dev-dependencies]
+ bar = [
+ { name = "child", version = "0.1.0", source = { directory = "child" } },
+ ]
+ foo = [
+ { name = "child", version = "0.1.0", source = { editable = "child" } },
+ ]
+
+ [package.metadata]
+
+ [package.metadata.requires-dev]
+ bar = [{ name = "child", directory = "child" }]
+ foo = [{ name = "child", editable = "child" }]
+ "#
+ );
+ });
+
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + child==0.1.0 (from file://[TEMP_DIR]/child)
+ ");
+
+ uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [{"name":"child","version":"0.1.0","editable_project_location":"[TEMP_DIR]/child"}]
+
+ ----- stderr -----
+ "#);
+
+ uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Uninstalled 1 package in [TIME]
+ Installed 1 package in [TIME]
+ ~ child==0.1.0 (from file://[TEMP_DIR]/child)
+ ");
+
+ uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [{"name":"child","version":"0.1.0"}]
+
+ ----- stderr -----
+ "#);
+
+ Ok(())
+}
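The change above turns the `editable` and `virtual` flags from plain `bool`s into `Option<bool>`, so "not requested" is distinct from an explicit `false`; the updated `same_resource` check treats `None` as compatible with either explicit value. A condensed sketch of that comparison follows, with an illustrative function name rather than uv's actual API:

// A condensed sketch of the `Option<bool>` semantics introduced above: `None`
// means "editability was not requested either way", so it is compatible with
// any explicit setting, while two explicit values must agree. The function
// name is illustrative, not uv's exact API.
fn editable_compatible(a: Option<bool>, b: Option<bool>) -> bool {
    match (a, b) {
        // An unspecified side never conflicts with the other.
        (None, _) | (_, None) => true,
        // Two explicit requests must match exactly.
        (Some(a), Some(b)) => a == b,
    }
}

fn main() {
    assert!(editable_compatible(None, Some(true)));
    assert!(editable_compatible(Some(false), None));
    assert!(!editable_compatible(Some(true), Some(false)));
    println!("ok");
}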
From dbaec0537ae5cfd5a55a4fbf17d93dbe9bef04b5 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Fri, 11 Jul 2025 07:47:06 -0500
Subject: [PATCH 064/130] Tear miette out of the `uv venv` command (#14546)
This has some changes to the user-facing output, but makes it more
consistent with the rest of uv.
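As a rough illustration of the output change (a sketch using the `thiserror` crate, not uv's actual error-reporting code): with miette removed, a source chain renders as a plain `error:` line followed by `Caused by:`, as seen in the updated test snapshots below, instead of miette's `×`/`╰─▶` report.

// Minimal sketch: a `thiserror` error with a `#[source]` chain, printed in the
// plain `error:` / `Caused by:` style. Requires the `thiserror` crate; the
// struct and message are illustrative.
use thiserror::Error;

#[derive(Error, Debug)]
#[error("Failed to create virtual environment")]
struct Creation(#[source] std::io::Error);

fn main() {
    use std::error::Error as _;
    let err = Creation(std::io::Error::new(
        std::io::ErrorKind::AlreadyExists,
        "File exists at `.venv`",
    ));
    // Prints:
    //   error: Failed to create virtual environment
    //     Caused by: File exists at `.venv`
    eprintln!("error: {err}");
    if let Some(source) = err.source() {
        eprintln!("  Caused by: {source}");
    }
}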
---
crates/uv/src/commands/venv.rs | 122 ++++++------------------------
crates/uv/tests/it/pip_compile.rs | 4 +-
crates/uv/tests/it/venv.rs | 104 +++++++++----------------
3 files changed, 62 insertions(+), 168 deletions(-)
diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs
index 6d6e15758..02bc818f8 100644
--- a/crates/uv/src/commands/venv.rs
+++ b/crates/uv/src/commands/venv.rs
@@ -4,9 +4,7 @@ use std::str::FromStr;
use std::sync::Arc;
use std::vec;
-use anstream::eprint;
use anyhow::Result;
-use miette::{Diagnostic, IntoDiagnostic};
use owo_colors::OwoColorize;
use thiserror::Error;
@@ -42,6 +40,21 @@ use crate::settings::NetworkSettings;
use super::project::default_dependency_groups;
+#[derive(Error, Debug)]
+enum VenvError {
+ #[error("Failed to create virtual environment")]
+ Creation(#[source] uv_virtualenv::Error),
+
+ #[error("Failed to install seed packages into virtual environment")]
+ Seed(#[source] AnyErrorBuild),
+
+ #[error("Failed to extract interpreter tags for installing seed packages")]
+ Tags(#[source] uv_platform_tags::TagsError),
+
+ #[error("Failed to resolve `--find-links` entry")]
+ FlatIndex(#[source] uv_client::FlatIndexError),
+}
+
/// Create a virtual environment.
#[allow(clippy::unnecessary_wraps, clippy::fn_params_excessive_bools)]
pub(crate) async fn venv(
@@ -70,89 +83,6 @@ pub(crate) async fn venv(
relocatable: bool,
preview: PreviewMode,
) -> Result<ExitStatus> {
- match venv_impl(
- project_dir,
- path,
- python_request,
- install_mirrors,
- link_mode,
- index_locations,
- index_strategy,
- dependency_metadata,
- keyring_provider,
- network_settings,
- prompt,
- system_site_packages,
- seed,
- python_preference,
- python_downloads,
- allow_existing,
- exclude_newer,
- concurrency,
- no_config,
- no_project,
- cache,
- printer,
- relocatable,
- preview,
- )
- .await
- {
- Ok(status) => Ok(status),
- Err(err) => {
- eprint!("{err:?}");
- Ok(ExitStatus::Failure)
- }
- }
-}
-
-#[derive(Error, Debug, Diagnostic)]
-enum VenvError {
- #[error("Failed to create virtualenv")]
- #[diagnostic(code(uv::venv::creation))]
- Creation(#[source] uv_virtualenv::Error),
-
- #[error("Failed to install seed packages")]
- #[diagnostic(code(uv::venv::seed))]
- Seed(#[source] AnyErrorBuild),
-
- #[error("Failed to extract interpreter tags")]
- #[diagnostic(code(uv::venv::tags))]
- Tags(#[source] uv_platform_tags::TagsError),
-
- #[error("Failed to resolve `--find-links` entry")]
- #[diagnostic(code(uv::venv::flat_index))]
- FlatIndex(#[source] uv_client::FlatIndexError),
-}
-
-/// Create a virtual environment.
-#[allow(clippy::fn_params_excessive_bools)]
-async fn venv_impl(
- project_dir: &Path,
- path: Option,
- python_request: Option,
- install_mirrors: PythonInstallMirrors,
- link_mode: LinkMode,
- index_locations: &IndexLocations,
- index_strategy: IndexStrategy,
- dependency_metadata: DependencyMetadata,
- keyring_provider: KeyringProviderType,
- network_settings: &NetworkSettings,
- prompt: uv_virtualenv::Prompt,
- system_site_packages: bool,
- seed: bool,
- python_preference: PythonPreference,
- python_downloads: PythonDownloads,
- allow_existing: bool,
- exclude_newer: Option,
- concurrency: Concurrency,
- no_config: bool,
- no_project: bool,
- cache: &Cache,
- printer: Printer,
- relocatable: bool,
- preview: PreviewMode,
-) -> miette::Result<ExitStatus> {
let workspace_cache = WorkspaceCache::default();
let project = if no_project {
None
@@ -206,7 +136,7 @@ async fn venv_impl(
// If the default dependency-groups demand a higher requires-python
// we should bias an empty venv to that to avoid churn.
let default_groups = match &project {
- Some(project) => default_dependency_groups(project.pyproject_toml()).into_diagnostic()?,
+ Some(project) => default_dependency_groups(project.pyproject_toml())?,
None => DefaultGroups::default(),
};
let groups = DependencyGroups::default().with_defaults(default_groups);
@@ -221,8 +151,7 @@ async fn venv_impl(
project_dir,
no_config,
)
- .await
- .into_diagnostic()?;
+ .await?;
// Locate the Python interpreter to use in the environment
let interpreter = {
@@ -239,9 +168,8 @@ async fn venv_impl(
install_mirrors.python_downloads_json_url.as_deref(),
preview,
)
- .await
- .into_diagnostic()?;
- report_interpreter(&python, false, printer).into_diagnostic()?;
+ .await?;
+ report_interpreter(&python, false, printer)?;
python.into_interpreter()
};
@@ -268,8 +196,7 @@ async fn venv_impl(
"Creating virtual environment {}at: {}",
if seed { "with seed packages " } else { "" },
path.user_display().cyan()
- )
- .into_diagnostic()?;
+ )?;
let upgradeable = preview.is_enabled()
&& python_request
@@ -307,8 +234,7 @@ async fn venv_impl(
}
// Instantiate a client.
- let client = RegistryClientBuilder::try_from(client_builder)
- .into_diagnostic()?
+ let client = RegistryClientBuilder::try_from(client_builder)?
.cache(cache.clone())
.index_locations(index_locations)
.index_strategy(index_strategy)
@@ -400,9 +326,7 @@ async fn venv_impl(
.map_err(|err| VenvError::Seed(err.into()))?;
let changelog = Changelog::from_installed(installed);
- DefaultInstallLogger
- .on_complete(&changelog, printer)
- .into_diagnostic()?;
+ DefaultInstallLogger.on_complete(&changelog, printer)?;
}
// Determine the appropriate activation command.
@@ -431,7 +355,7 @@ async fn venv_impl(
Some(Shell::Cmd) => Some(shlex_windows(venv.scripts().join("activate"), Shell::Cmd)),
};
if let Some(act) = activation {
- writeln!(printer.stderr(), "Activate with: {}", act.green()).into_diagnostic()?;
+ writeln!(printer.stderr(), "Activate with: {}", act.green())?;
}
Ok(ExitStatus::Success)
diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs
index f04c16b86..ac3549874 100644
--- a/crates/uv/tests/it/pip_compile.rs
+++ b/crates/uv/tests/it/pip_compile.rs
@@ -17411,11 +17411,11 @@ fn compile_broken_active_venv() -> Result<()> {
.arg(&broken_system_python)
.arg("venv2"), @r"
success: false
- exit_code: 1
+ exit_code: 2
----- stdout -----
----- stderr -----
- × No interpreter found at path `python3.14159`
+ error: No interpreter found at path `python3.14159`
");
// Simulate a removed Python interpreter
diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs
index 52291c05d..43cacb640 100644
--- a/crates/uv/tests/it/venv.rs
+++ b/crates/uv/tests/it/venv.rs
@@ -656,13 +656,13 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r"
success: false
- exit_code: 1
+ exit_code: 2
----- stdout -----
----- stderr -----
- × Found conflicting Python requirements:
- │ - foo: <3.12
- │ - foo:dev: >=3.12
+ error: Found conflicting Python requirements:
+ - foo: <3.12
+ - foo:dev: >=3.12
"
);
@@ -808,7 +808,7 @@ fn seed_older_python_version() {
#[test]
fn create_venv_unknown_python_minor() {
- let context = TestContext::new_with_versions(&["3.12"]);
+ let context = TestContext::new_with_versions(&["3.12"]).with_filtered_python_sources();
let mut command = context.venv();
command
@@ -819,34 +819,22 @@ fn create_venv_unknown_python_minor() {
// Unset this variable to force what the user would see
.env_remove(EnvVars::UV_TEST_PYTHON_PATH);
- if cfg!(windows) {
- uv_snapshot!(&mut command, @r###"
- success: false
- exit_code: 1
- ----- stdout -----
+ uv_snapshot!(context.filters(), &mut command, @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
- ----- stderr -----
- × No interpreter found for Python 3.100 in managed installations, search path, or registry
- "###
- );
- } else {
- uv_snapshot!(&mut command, @r###"
- success: false
- exit_code: 1
- ----- stdout -----
-
- ----- stderr -----
- × No interpreter found for Python 3.100 in managed installations or search path
- "###
- );
- }
+ ----- stderr -----
+ error: No interpreter found for Python 3.100 in [PYTHON SOURCES]
+ "
+ );
context.venv.assert(predicates::path::missing());
}
#[test]
fn create_venv_unknown_python_patch() {
- let context = TestContext::new_with_versions(&["3.12"]);
+ let context = TestContext::new_with_versions(&["3.12"]).with_filtered_python_sources();
let mut command = context.venv();
command
@@ -857,27 +845,15 @@ fn create_venv_unknown_python_patch() {
// Unset this variable to force what the user would see
.env_remove(EnvVars::UV_TEST_PYTHON_PATH);
- if cfg!(windows) {
- uv_snapshot!(&mut command, @r###"
- success: false
- exit_code: 1
- ----- stdout -----
+ uv_snapshot!(context.filters(), &mut command, @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
- ----- stderr -----
- × No interpreter found for Python 3.12.100 in managed installations, search path, or registry
- "###
- );
- } else {
- uv_snapshot!(&mut command, @r"
- success: false
- exit_code: 1
- ----- stdout -----
-
- ----- stderr -----
- × No interpreter found for Python 3.12.100 in managed installations or search path
- "
- );
- }
+ ----- stderr -----
+ error: No interpreter found for Python 3.12.[X] in [PYTHON SOURCES]
+ "
+ );
context.venv.assert(predicates::path::missing());
}
@@ -915,19 +891,17 @@ fn file_exists() -> Result<()> {
uv_snapshot!(context.filters(), context.venv()
.arg(context.venv.as_os_str())
.arg("--python")
- .arg("3.12"), @r###"
+ .arg("3.12"), @r"
success: false
- exit_code: 1
+ exit_code: 2
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
- uv::venv::creation
-
- × Failed to create virtualenv
- ╰─▶ File exists at `.venv`
- "###
+ error: Failed to create virtual environment
+ Caused by: File exists at `.venv`
+ "
);
Ok(())
@@ -970,19 +944,17 @@ fn non_empty_dir_exists() -> Result<()> {
uv_snapshot!(context.filters(), context.venv()
.arg(context.venv.as_os_str())
.arg("--python")
- .arg("3.12"), @r###"
+ .arg("3.12"), @r"
success: false
- exit_code: 1
+ exit_code: 2
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
- uv::venv::creation
-
- × Failed to create virtualenv
- ╰─▶ The directory `.venv` exists, but it's not a virtual environment
- "###
+ error: Failed to create virtual environment
+ Caused by: The directory `.venv` exists, but it's not a virtual environment
+ "
);
Ok(())
@@ -1000,19 +972,17 @@ fn non_empty_dir_exists_allow_existing() -> Result<()> {
uv_snapshot!(context.filters(), context.venv()
.arg(context.venv.as_os_str())
.arg("--python")
- .arg("3.12"), @r###"
+ .arg("3.12"), @r"
success: false
- exit_code: 1
+ exit_code: 2
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
- uv::venv::creation
-
- × Failed to create virtualenv
- ╰─▶ The directory `.venv` exists, but it's not a virtual environment
- "###
+ error: Failed to create virtual environment
+ Caused by: The directory `.venv` exists, but it's not a virtual environment
+ "
);
uv_snapshot!(context.filters(), context.venv()
From 9cf78217413fe6bee28fbe44e67689802c7b7486 Mon Sep 17 00:00:00 2001
From: Aria Desires
Date: Fri, 11 Jul 2025 13:01:41 -0400
Subject: [PATCH 065/130] Add missing validations for disallowed `uv.toml`
fields (#14322)
We weren't following our usual "destructure all the options" pattern in
this function, and several "this isn't actually read from uv.toml"
fields slipped through the cracks over time since folks forgot it
existed.
Fixes part of #14308, although we could still try to make the warning in
FilesystemOptions more accurate?
You could argue this is a breaking change, but I think it ultimately
isn't really, because we were already silently ignoring these fields.
Now we properly error.
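A condensed sketch of the "destructure all the options" pattern this commit applies (the struct, error type, and fields below are illustrative stand-ins, not uv's real definitions): exhaustively destructuring the options struct means the compiler flags any field that hasn't been explicitly marked allowed (`: _`) or validated, so new `pyproject.toml`-only fields can't silently slip through again.

// Illustrative sketch of exhaustive-destructuring validation. Adding a field
// to `Options` without listing it in the destructure is a compile error,
// forcing a decision about whether it is allowed in `uv.toml`.
struct Options {
    globals: Option<String>,
    workspace: Option<String>,
    managed: Option<bool>,
}

#[derive(Debug)]
enum Error {
    PyprojectOnlyField(&'static str),
}

fn validate_uv_toml(options: &Options) -> Result<(), Error> {
    let Options {
        globals: _, // allowed in `uv.toml`
        workspace,
        managed,
    } = options;
    if workspace.is_some() {
        return Err(Error::PyprojectOnlyField("workspace"));
    }
    if managed.is_some() {
        return Err(Error::PyprojectOnlyField("managed"));
    }
    Ok(())
}

fn main() {
    let options = Options {
        globals: None,
        workspace: Some(String::new()),
        managed: None,
    };
    // Prints: Err(PyprojectOnlyField("workspace"))
    println!("{:?}", validate_uv_toml(&options));
}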
---
crates/uv-settings/src/lib.rs | 50 ++++++++++++++++++++++++++----
crates/uv-settings/src/settings.rs | 2 +-
2 files changed, 45 insertions(+), 7 deletions(-)
diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs
index cad600cfc..84aef8f28 100644
--- a/crates/uv-settings/src/lib.rs
+++ b/crates/uv-settings/src/lib.rs
@@ -201,33 +201,71 @@ fn read_file(path: &Path) -> Result {
/// Validate that an [`Options`] schema is compatible with `uv.toml`.
fn validate_uv_toml(path: &Path, options: &Options) -> Result<(), Error> {
+ let Options {
+ globals: _,
+ top_level: _,
+ install_mirrors: _,
+ publish: _,
+ add: _,
+ pip: _,
+ cache_keys: _,
+ override_dependencies: _,
+ constraint_dependencies: _,
+ build_constraint_dependencies: _,
+ environments: _,
+ required_environments: _,
+ conflicts,
+ workspace,
+ sources,
+ dev_dependencies,
+ default_groups,
+ dependency_groups,
+ managed,
+ package,
+ build_backend,
+ } = options;
// The `uv.toml` format is not allowed to include any of the following, which are
// permitted by the schema since they _can_ be included in `pyproject.toml` files
// (and we want to use `deny_unknown_fields`).
- if options.workspace.is_some() {
+ if conflicts.is_some() {
+ return Err(Error::PyprojectOnlyField(path.to_path_buf(), "conflicts"));
+ }
+ if workspace.is_some() {
return Err(Error::PyprojectOnlyField(path.to_path_buf(), "workspace"));
}
- if options.sources.is_some() {
+ if sources.is_some() {
return Err(Error::PyprojectOnlyField(path.to_path_buf(), "sources"));
}
- if options.dev_dependencies.is_some() {
+ if dev_dependencies.is_some() {
return Err(Error::PyprojectOnlyField(
path.to_path_buf(),
"dev-dependencies",
));
}
- if options.default_groups.is_some() {
+ if default_groups.is_some() {
return Err(Error::PyprojectOnlyField(
path.to_path_buf(),
"default-groups",
));
}
- if options.managed.is_some() {
+ if dependency_groups.is_some() {
+ return Err(Error::PyprojectOnlyField(
+ path.to_path_buf(),
+ "dependency-groups",
+ ));
+ }
+ if managed.is_some() {
return Err(Error::PyprojectOnlyField(path.to_path_buf(), "managed"));
}
- if options.package.is_some() {
+ if package.is_some() {
return Err(Error::PyprojectOnlyField(path.to_path_buf(), "package"));
}
+ if build_backend.is_some() {
+ return Err(Error::PyprojectOnlyField(
+ path.to_path_buf(),
+ "build-backend",
+ ));
+ }
Ok(())
}
diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs
index d80ccce2f..e057cb40a 100644
--- a/crates/uv-settings/src/settings.rs
+++ b/crates/uv-settings/src/settings.rs
@@ -103,7 +103,7 @@ pub struct Options {
cache-keys = [{ file = "pyproject.toml" }, { file = "requirements.txt" }, { git = { commit = true } }]
"#
)]
- cache_keys: Option<Vec<CacheKey>>,
+ pub cache_keys: Option<Vec<CacheKey>>,
// NOTE(charlie): These fields are shared with `ToolUv` in
// `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct.
From 6df7dab2df6e5a9b3bf36183851dd9d7c0824c9f Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Mon, 14 Jul 2025 13:18:39 -0400
Subject: [PATCH 066/130] Use an ephemeral environment for `uv run --with`
invocations (#14447)
This PR creates separation between the `--with` environment and the
environment we actually run in, which in turn solves issues like
https://github.com/astral-sh/uv/issues/12889 whereby two invocations
share the same `--with` environment, causing them to collide by way of
sharing an overlay.
Closes https://github.com/astral-sh/uv/issues/7643.
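The core mechanism is sketched below (the paths and helper are illustrative, and the real code also escapes the paths for Python and records the parent environment in `pyvenv.cfg`): the ephemeral environment gets a `.pth` file in its `site-packages` that adds both the project environment's and the `--with` requirements environment's `site-packages` to `sys.path` at startup, so the cached `--with` environment itself is never modified.

// Minimal sketch of writing the `.pth` overlay into an ephemeral environment.
use std::path::Path;

fn write_overlay(
    ephemeral_site_packages: &Path,
    base_site_packages: &Path,
    requirements_site_packages: &Path,
) -> std::io::Result<()> {
    // The real implementation escapes paths for Python string literals;
    // `display()` is a simplification for this sketch.
    let contents = format!(
        "import site; site.addsitedir(\"{}\"); site.addsitedir(\"{}\");",
        base_site_packages.display(),
        requirements_site_packages.display(),
    );
    std::fs::write(
        ephemeral_site_packages.join("_uv_ephemeral_overlay.pth"),
        contents,
    )
}

fn main() -> std::io::Result<()> {
    let ephemeral = std::env::temp_dir().join("uv-ephemeral-demo-site-packages");
    std::fs::create_dir_all(&ephemeral)?;
    write_overlay(
        &ephemeral,
        Path::new("/project/.venv/lib/python3.12/site-packages"),
        Path::new("/cache/with-env/lib/python3.12/site-packages"),
    )
}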
---
crates/uv-python/src/environment.rs | 2 +-
crates/uv/src/commands/project/environment.rs | 166 ++++++++----------
crates/uv/src/commands/project/mod.rs | 3 -
crates/uv/src/commands/project/run.rs | 116 ++++++++----
crates/uv/src/commands/tool/run.rs | 4 -
crates/uv/tests/it/lock.rs | 2 +-
crates/uv/tests/it/run.rs | 13 +-
7 files changed, 159 insertions(+), 147 deletions(-)
diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs
index 02f9fd683..07f3ddb54 100644
--- a/crates/uv-python/src/environment.rs
+++ b/crates/uv-python/src/environment.rs
@@ -174,7 +174,7 @@ impl PythonEnvironment {
/// N.B. This function also works for system Python environments and users depend on this.
pub fn from_root(root: impl AsRef<Path>, cache: &Cache) -> Result<Self, Error> {
debug!(
- "Checking for Python environment at `{}`",
+ "Checking for Python environment at: `{}`",
root.as_ref().user_display()
);
match root.as_ref().try_exists() {
diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs
index a3cda28c1..cf1add99a 100644
--- a/crates/uv/src/commands/project/environment.rs
+++ b/crates/uv/src/commands/project/environment.rs
@@ -17,6 +17,69 @@ use crate::commands::project::{
use crate::printer::Printer;
use crate::settings::{NetworkSettings, ResolverInstallerSettings};
+/// An ephemeral [`PythonEnvironment`] for running an individual command.
+#[derive(Debug)]
+pub(crate) struct EphemeralEnvironment(PythonEnvironment);
+
+impl From<PythonEnvironment> for EphemeralEnvironment {
+ fn from(environment: PythonEnvironment) -> Self {
+ Self(environment)
+ }
+}
+
+impl From<EphemeralEnvironment> for PythonEnvironment {
+ fn from(environment: EphemeralEnvironment) -> Self {
+ environment.0
+ }
+}
+
+impl EphemeralEnvironment {
+ /// Set the ephemeral overlay for a Python environment.
+ #[allow(clippy::result_large_err)]
+ pub(crate) fn set_overlay(&self, contents: impl AsRef<[u8]>) -> Result<(), ProjectError> {
+ let site_packages = self
+ .0
+ .site_packages()
+ .next()
+ .ok_or(ProjectError::NoSitePackages)?;
+ let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth");
+ fs_err::write(overlay_path, contents)?;
+ Ok(())
+ }
+
+ /// Enable system site packages for a Python environment.
+ #[allow(clippy::result_large_err)]
+ pub(crate) fn set_system_site_packages(&self) -> Result<(), ProjectError> {
+ self.0
+ .set_pyvenv_cfg("include-system-site-packages", "true")?;
+ Ok(())
+ }
+
+ /// Set the `extends-environment` key in the `pyvenv.cfg` file to the given path.
+ ///
+ /// Ephemeral environments created by `uv run --with` extend a parent (virtual or system)
+ /// environment by adding a `.pth` file to the ephemeral environment's `site-packages`
+ /// directory. The `pth` file contains Python code to dynamically add the parent
+ /// environment's `site-packages` directory to Python's import search paths in addition to
+ /// the ephemeral environment's `site-packages` directory. This works well at runtime, but
+ /// is too dynamic for static analysis tools like ty to understand. As such, we
+ /// additionally write the `sys.prefix` of the parent environment to to the
+ /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it
+ /// easier for these tools to statically and reliably understand the relationship between
+ /// the two environments.
+ #[allow(clippy::result_large_err)]
+ pub(crate) fn set_parent_environment(
+ &self,
+ parent_environment_sys_prefix: &Path,
+ ) -> Result<(), ProjectError> {
+ self.0.set_pyvenv_cfg(
+ "extends-environment",
+ &parent_environment_sys_prefix.escape_for_python(),
+ )?;
+ Ok(())
+ }
+}
+
/// A [`PythonEnvironment`] stored in the cache.
#[derive(Debug)]
pub(crate) struct CachedEnvironment(PythonEnvironment);
@@ -44,15 +107,13 @@ impl CachedEnvironment {
printer: Printer,
preview: PreviewMode,
) -> Result {
- // Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the
- // given interpreter is a virtual environment.
- let base_interpreter = Self::base_interpreter(interpreter, cache)?;
+ let interpreter = Self::base_interpreter(interpreter, cache)?;
// Resolve the requirements with the interpreter.
let resolution = Resolution::from(
resolve_environment(
spec,
- &base_interpreter,
+ &interpreter,
build_constraints.clone(),
&settings.resolver,
network_settings,
@@ -80,29 +141,20 @@ impl CachedEnvironment {
// Use the canonicalized base interpreter path since that's the interpreter we performed the
// resolution with and the interpreter the environment will be created with.
//
- // We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the
- // virtual environment's path. Originally, we shared cached environments independent of the
- // environment they'd be layered on top of. However, this causes collisions as the overlay
- // `.pth` file can be overridden by another instance of uv. Including this element in the key
- // avoids this problem at the cost of creating separate cached environments for identical
- // `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so
- // we can canonicalize it without invalidating the purpose of the element — it'd probably be
- // safe to just use the absolute `sys.executable` as well.
- //
- // TODO(zanieb): Since we're not sharing these environments across projects, we should move
- // [`CachedEnvironment::set_overlay`] etc. here since the values there should be constant
- // now.
+ // We cache environments independent of the environment they'd be layered on top of. The
+ // assumption is such that the environment will _not_ be modified by the user or uv;
+ // otherwise, we risk cache poisoning. For example, if we were to write a `.pth` file to
+ // the cached environment, it would be shared across all projects that use the same
+ // interpreter and the same cached dependencies.
//
// TODO(zanieb): We should include the version of the base interpreter in the hash, so if
// the interpreter at the canonicalized path changes versions we construct a new
// environment.
- let environment_hash = cache_digest(&(
- &canonicalize_executable(base_interpreter.sys_executable())?,
- &interpreter.sys_prefix().canonicalize()?,
- ));
+ let interpreter_hash =
+ cache_digest(&canonicalize_executable(interpreter.sys_executable())?);
// Search in the content-addressed cache.
- let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash);
+ let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash);
if cache.refresh().is_none() {
if let Ok(root) = cache.resolve_link(cache_entry.path()) {
@@ -116,7 +168,7 @@ impl CachedEnvironment {
let temp_dir = cache.venv_dir()?;
let venv = uv_virtualenv::create_venv(
temp_dir.path(),
- base_interpreter,
+ interpreter,
uv_virtualenv::Prompt::None,
false,
false,
@@ -150,76 +202,6 @@ impl CachedEnvironment {
Ok(Self(PythonEnvironment::from_root(root, cache)?))
}
- /// Set the ephemeral overlay for a Python environment.
- #[allow(clippy::result_large_err)]
- pub(crate) fn set_overlay(&self, contents: impl AsRef<[u8]>) -> Result<(), ProjectError> {
- let site_packages = self
- .0
- .site_packages()
- .next()
- .ok_or(ProjectError::NoSitePackages)?;
- let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth");
- fs_err::write(overlay_path, contents)?;
- Ok(())
- }
-
- /// Clear the ephemeral overlay for a Python environment, if it exists.
- #[allow(clippy::result_large_err)]
- pub(crate) fn clear_overlay(&self) -> Result<(), ProjectError> {
- let site_packages = self
- .0
- .site_packages()
- .next()
- .ok_or(ProjectError::NoSitePackages)?;
- let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth");
- match fs_err::remove_file(overlay_path) {
- Ok(()) => (),
- Err(err) if err.kind() == std::io::ErrorKind::NotFound => (),
- Err(err) => return Err(ProjectError::OverlayRemoval(err)),
- }
- Ok(())
- }
-
- /// Enable system site packages for a Python environment.
- #[allow(clippy::result_large_err)]
- pub(crate) fn set_system_site_packages(&self) -> Result<(), ProjectError> {
- self.0
- .set_pyvenv_cfg("include-system-site-packages", "true")?;
- Ok(())
- }
-
- /// Disable system site packages for a Python environment.
- #[allow(clippy::result_large_err)]
- pub(crate) fn clear_system_site_packages(&self) -> Result<(), ProjectError> {
- self.0
- .set_pyvenv_cfg("include-system-site-packages", "false")?;
- Ok(())
- }
-
- /// Set the `extends-environment` key in the `pyvenv.cfg` file to the given path.
- ///
- /// Ephemeral environments created by `uv run --with` extend a parent (virtual or system)
- /// environment by adding a `.pth` file to the ephemeral environment's `site-packages`
- /// directory. The `pth` file contains Python code to dynamically add the parent
- /// environment's `site-packages` directory to Python's import search paths in addition to
- /// the ephemeral environment's `site-packages` directory. This works well at runtime, but
- /// is too dynamic for static analysis tools like ty to understand. As such, we
- /// additionally write the `sys.prefix` of the parent environment to the
- /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it
- /// easier for these tools to statically and reliably understand the relationship between
- /// the two environments.
- #[allow(clippy::result_large_err)]
- pub(crate) fn set_parent_environment(
- &self,
- parent_environment_sys_prefix: &Path,
- ) -> Result<(), ProjectError> {
- self.0.set_pyvenv_cfg(
- "extends-environment",
- &parent_environment_sys_prefix.escape_for_python(),
- )?;
- Ok(())
- }
-
/// Return the [`Interpreter`] to use for the cached environment, based on a given
/// [`Interpreter`].
///
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index 774009f63..eaccaefa6 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -200,9 +200,6 @@ pub(crate) enum ProjectError {
#[error("Failed to parse PEP 723 script metadata")]
Pep723ScriptTomlParse(#[source] toml::de::Error),
- #[error("Failed to remove ephemeral overlay")]
- OverlayRemoval(#[source] std::io::Error),
-
#[error("Failed to find `site-packages` directory for environment")]
NoSitePackages,
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs
index 63850f563..16ebf88fb 100644
--- a/crates/uv/src/commands/project/run.rs
+++ b/crates/uv/src/commands/project/run.rs
@@ -45,7 +45,7 @@ use crate::commands::pip::loggers::{
DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger,
};
use crate::commands::pip::operations::Modifications;
-use crate::commands::project::environment::CachedEnvironment;
+use crate::commands::project::environment::{CachedEnvironment, EphemeralEnvironment};
use crate::commands::project::install_target::InstallTarget;
use crate::commands::project::lock::LockMode;
use crate::commands::project::lock_target::LockTarget;
@@ -944,7 +944,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
// If necessary, create an environment for the ephemeral requirements or command.
let base_site_packages = SitePackages::from_interpreter(&base_interpreter)?;
- let ephemeral_env = match spec {
+ let requirements_env = match spec {
None => None,
Some(spec)
if can_skip_ephemeral(&spec, &base_interpreter, &base_site_packages, &settings) =>
@@ -952,7 +952,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
None
}
Some(spec) => {
- debug!("Syncing ephemeral requirements");
+ debug!("Syncing `--with` requirements to cached environment");
// Read the build constraints from the lock file.
let build_constraints = base_lock
@@ -1013,54 +1013,92 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
Err(err) => return Err(err.into()),
};
- Some(environment)
+ Some(PythonEnvironment::from(environment))
}
};
- // If we're running in an ephemeral environment, add a path file to enable loading of
- // the base environment's site packages. Setting `PYTHONPATH` is insufficient, as it doesn't
- // resolve `.pth` files in the base environment.
+ // If we're layering requirements atop the project environment, run the command in an ephemeral,
+ // isolated environment. Otherwise, modifications to the "active virtual environment" would
+ // poison the cache.
+ let ephemeral_dir = requirements_env
+ .as_ref()
+ .map(|_| cache.venv_dir())
+ .transpose()?;
+
+ let ephemeral_env = ephemeral_dir
+ .as_ref()
+ .map(|dir| {
+ debug!(
+ "Creating ephemeral environment at: `{}`",
+ dir.path().simplified_display()
+ );
+
+ uv_virtualenv::create_venv(
+ dir.path(),
+ base_interpreter.clone(),
+ uv_virtualenv::Prompt::None,
+ false,
+ false,
+ false,
+ false,
+ false,
+ preview,
+ )
+ })
+ .transpose()?
+ .map(EphemeralEnvironment::from);
+
+ // If we're running in an ephemeral environment, add a path file to enable loading from the
+ // `--with` requirements environment and the project environment site packages.
//
- // `sitecustomize.py` would be an alternative, but it can be shadowed by an existing such
- // module in the python installation.
+ // Setting `PYTHONPATH` is insufficient, as it doesn't resolve `.pth` files in the base
+ // environment. Adding `sitecustomize.py` would be an alternative, but it can be shadowed by an
+ // existing such module in the python installation.
if let Some(ephemeral_env) = ephemeral_env.as_ref() {
- let site_packages = base_interpreter
- .site_packages()
- .next()
- .ok_or_else(|| ProjectError::NoSitePackages)?;
- ephemeral_env.set_overlay(format!(
- "import site; site.addsitedir(\"{}\")",
- site_packages.escape_for_python()
- ))?;
+ if let Some(requirements_env) = requirements_env.as_ref() {
+ let requirements_site_packages =
+ requirements_env.site_packages().next().ok_or_else(|| {
+ anyhow!("Requirements environment has no site packages directory")
+ })?;
+ let base_site_packages = base_interpreter
+ .site_packages()
+ .next()
+ .ok_or_else(|| anyhow!("Base environment has no site packages directory"))?;
- // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg`
- // file. This helps out static-analysis tools such as ty (see docs on
- // `CachedEnvironment::set_parent_environment`).
- //
- // Note that we do this even if the parent environment is not a virtual environment.
- // For ephemeral environments created by `uv run --with`, the parent environment's
- // `site-packages` directory is added to `sys.path` even if the parent environment is not
- // a virtual environment and even if `--system-site-packages` was not explicitly selected.
- ephemeral_env.set_parent_environment(base_interpreter.sys_prefix())?;
+ ephemeral_env.set_overlay(format!(
+ "import site; site.addsitedir(\"{}\"); site.addsitedir(\"{}\");",
+ base_site_packages.escape_for_python(),
+ requirements_site_packages.escape_for_python(),
+ ))?;
- // If `--system-site-packages` is enabled, add the system site packages to the ephemeral
- // environment.
- if base_interpreter.is_virtualenv()
- && PyVenvConfiguration::parse(base_interpreter.sys_prefix().join("pyvenv.cfg"))
- .is_ok_and(|cfg| cfg.include_system_site_packages())
- {
- ephemeral_env.set_system_site_packages()?;
- } else {
- ephemeral_env.clear_system_site_packages()?;
+ // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg`
+ // file. This helps out static-analysis tools such as ty (see docs on
+ // `CachedEnvironment::set_parent_environment`).
+ //
+ // Note that we do this even if the parent environment is not a virtual environment.
+ // For ephemeral environments created by `uv run --with`, the parent environment's
+ // `site-packages` directory is added to `sys.path` even if the parent environment is not
+ // a virtual environment and even if `--system-site-packages` was not explicitly selected.
+ ephemeral_env.set_parent_environment(base_interpreter.sys_prefix())?;
+
+ // If `--system-site-packages` is enabled, add the system site packages to the ephemeral
+ // environment.
+ if base_interpreter.is_virtualenv()
+ && PyVenvConfiguration::parse(base_interpreter.sys_prefix().join("pyvenv.cfg"))
+ .is_ok_and(|cfg| cfg.include_system_site_packages())
+ {
+ ephemeral_env.set_system_site_packages()?;
+ }
}
}
- // Cast from `CachedEnvironment` to `PythonEnvironment`.
+ // Cast to `PythonEnvironment`.
let ephemeral_env = ephemeral_env.map(PythonEnvironment::from);
// Determine the Python interpreter to use for the command, if necessary.
let interpreter = ephemeral_env
.as_ref()
+ .or(requirements_env.as_ref())
.map_or_else(|| &base_interpreter, |env| env.interpreter());
// Check if any run command is given.
@@ -1143,6 +1181,12 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
.as_ref()
.map(PythonEnvironment::scripts)
.into_iter()
+ .chain(
+ requirements_env
+ .as_ref()
+ .map(PythonEnvironment::scripts)
+ .into_iter(),
+ )
.chain(std::iter::once(base_interpreter.scripts()))
.chain(
// On Windows, non-virtual Python distributions put `python.exe` in the top-level
diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs
index f6b79774c..a1faa1153 100644
--- a/crates/uv/src/commands/tool/run.rs
+++ b/crates/uv/src/commands/tool/run.rs
@@ -1081,9 +1081,5 @@ async fn get_or_create_environment(
},
};
- // Clear any existing overlay.
- environment.clear_overlay()?;
- environment.clear_system_site_packages()?;
-
Ok((from, environment.into()))
}
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index 477b4b039..d7ac9b47a 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -15806,7 +15806,7 @@ fn lock_explicit_default_index() -> Result<()> {
DEBUG Adding root workspace member: `[TEMP_DIR]/`
DEBUG No Python version file found in workspace: [TEMP_DIR]/
DEBUG Using Python request `>=3.12` from `requires-python` metadata
- DEBUG Checking for Python environment at `.venv`
+ DEBUG Checking for Python environment at: `.venv`
DEBUG The project environment's Python version satisfies the request: `Python >=3.12`
DEBUG Using request timeout of [TIME]
DEBUG Found static `pyproject.toml` for: project @ file://[TEMP_DIR]/
diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs
index 93420cca0..6a1eb6093 100644
--- a/crates/uv/tests/it/run.rs
+++ b/crates/uv/tests/it/run.rs
@@ -1302,7 +1302,6 @@ fn run_with_pyvenv_cfg_file() -> Result<()> {
uv = [UV_VERSION]
version_info = 3.12.[X]
include-system-site-packages = false
- relocatable = true
extends-environment = [PARENT_VENV]
@@ -4778,7 +4777,6 @@ fn run_groups_include_requires_python() -> Result<()> {
baz = ["iniconfig"]
dev = ["sniffio", {include-group = "foo"}, {include-group = "baz"}]
-
[tool.uv.dependency-groups]
foo = {requires-python="<3.13"}
bar = {requires-python=">=3.13"}
@@ -4923,8 +4921,8 @@ fn run_repeated() -> Result<()> {
Resolved 1 package in [TIME]
"###);
- // Re-running as a tool does require reinstalling `typing-extensions`, since the base venv is
- // different.
+ // Re-running as a tool doesn't require reinstalling `typing-extensions`, since the environment
+ // is cached.
uv_snapshot!(
context.filters(),
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
@@ -4934,8 +4932,6 @@ fn run_repeated() -> Result<()> {
----- stderr -----
Resolved 1 package in [TIME]
- Installed 1 package in [TIME]
- + typing-extensions==4.10.0
Traceback (most recent call last):
File "", line 1, in
import typing_extensions; import iniconfig
@@ -4982,8 +4978,7 @@ fn run_without_overlay() -> Result<()> {
+ typing-extensions==4.10.0
"###);
- // Import `iniconfig` in the context of a `tool run` command, which should fail. Note that
- // typing-extensions gets installed again, because the venv is not shared.
+ // Import `iniconfig` in the context of a `tool run` command, which should fail.
uv_snapshot!(
context.filters(),
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
@@ -4993,8 +4988,6 @@ fn run_without_overlay() -> Result<()> {
----- stderr -----
Resolved 1 package in [TIME]
- Installed 1 package in [TIME]
- + typing-extensions==4.10.0
Traceback (most recent call last):
File "", line 1, in
import typing_extensions; import iniconfig
From 2850dc05992c47a10a1f968d65c59c1e21cf1df2 Mon Sep 17 00:00:00 2001
From: Aria Desires
Date: Mon, 14 Jul 2025 13:47:52 -0400
Subject: [PATCH 067/130] make `--check` outdated a non-error status 1 (#14167)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
In the case of `uv sync`, all we really need to do is handle the
`OutdatedEnvironment` error (precisely the error we yield only on
dry runs, when everything works but we determine things are outdated) in
`OperationDiagnostic::report` (the post-processor on all
`operations::install` calls), because any diagnostic handled there gets
downgraded from status 2 to status 1 (although I don't know whether
that's really intentional or a random other bug in our status
handling... but I figured it's better to highlight that other potential
status-code incongruence than to silently rely on it 😄).
Fixes #12603
---------
Co-authored-by: John Mumm
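A condensed sketch of the resulting status mapping (types and messages are illustrative, not uv's exact definitions): a lock mismatch under `--check`/`--locked` is printed to stderr and exits with status 1, while other failures keep exiting with status 2.

// Illustrative sketch of the status-code mapping described above.
#[derive(Debug)]
enum ProjectError {
    LockMismatch,
    Other(&'static str),
}

#[derive(Debug)]
enum ExitStatus {
    Success, // 0
    Failure, // 1
    Error,   // 2
}

fn report(result: Result<(), ProjectError>) -> ExitStatus {
    match result {
        Ok(()) => ExitStatus::Success,
        // An outdated lockfile under `--check`/`--locked` is reported on
        // stderr but only maps to status 1.
        Err(ProjectError::LockMismatch) => {
            eprintln!(
                "The lockfile at `uv.lock` needs to be updated, but `--locked` was provided."
            );
            ExitStatus::Failure
        }
        // Everything else remains a hard error (status 2).
        Err(err) => {
            eprintln!("error: {err:?}");
            ExitStatus::Error
        }
    }
}

fn main() {
    println!("{:?}", report(Err(ProjectError::LockMismatch))); // Failure
    println!("{:?}", report(Err(ProjectError::Other("resolution failed")))); // Error
}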
---
crates/uv/src/commands/diagnostics.rs | 4 +
crates/uv/src/commands/project/lock.rs | 11 +-
crates/uv/src/commands/project/mod.rs | 2 +-
crates/uv/src/commands/project/sync.rs | 37 +++-
crates/uv/tests/it/lock.rs | 233 +++++++++++++++++--------
crates/uv/tests/it/sync.rs | 28 +--
6 files changed, 217 insertions(+), 98 deletions(-)
diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs
index 7a9fcbd35..02412d683 100644
--- a/crates/uv/src/commands/diagnostics.rs
+++ b/crates/uv/src/commands/diagnostics.rs
@@ -127,6 +127,10 @@ impl OperationDiagnostic {
native_tls_hint(err);
None
}
+ pip::operations::Error::OutdatedEnvironment => {
+ anstream::eprint!("{}", err);
+ None
+ }
err => Some(err),
}
}
diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs
index f79557d9e..833e59a13 100644
--- a/crates/uv/src/commands/project/lock.rs
+++ b/crates/uv/src/commands/project/lock.rs
@@ -234,6 +234,10 @@ pub(crate) async fn lock(
Ok(ExitStatus::Success)
}
+ Err(err @ ProjectError::LockMismatch(..)) => {
+ writeln!(printer.stderr(), "{}", err.to_string().bold())?;
+ Ok(ExitStatus::Failure)
+ }
Err(ProjectError::Operation(err)) => {
diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls)
.report(err)
@@ -346,8 +350,11 @@ impl<'env> LockOperation<'env> {
.await?;
// If the lockfile changed, return an error.
- if matches!(result, LockResult::Changed(_, _)) {
- return Err(ProjectError::LockMismatch(Box::new(result.into_lock())));
+ if let LockResult::Changed(prev, cur) = result {
+ return Err(ProjectError::LockMismatch(
+ prev.map(Box::new),
+ Box::new(cur),
+ ));
}
Ok(result)
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index eaccaefa6..fde2b638c 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -75,7 +75,7 @@ pub(crate) enum ProjectError {
#[error(
"The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`."
)]
- LockMismatch(Box<Lock>),
+ LockMismatch(Option<Box<Lock>>, Box<Lock>),
#[error(
"Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`."
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 5843df6be..40aa1b352 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -330,10 +330,19 @@ pub(crate) async fn sync(
.report(err)
.map_or(Ok(ExitStatus::Failure), |err| Err(err.into()));
}
- Err(ProjectError::LockMismatch(lock)) if dry_run.enabled() => {
- // The lockfile is mismatched, but we're in dry-run mode. We should proceed with the
- // sync operation, but exit with a non-zero status.
- Outcome::LockMismatch(lock)
+ Err(ProjectError::LockMismatch(prev, cur)) => {
+ if dry_run.enabled() {
+ // The lockfile is mismatched, but we're in dry-run mode. We should proceed with the
+ // sync operation, but exit with a non-zero status.
+ Outcome::LockMismatch(prev, cur)
+ } else {
+ writeln!(
+ printer.stderr(),
+ "{}",
+ ProjectError::LockMismatch(prev, cur).to_string().bold()
+ )?;
+ return Ok(ExitStatus::Failure);
+ }
}
Err(err) => return Err(err.into()),
};
@@ -398,7 +407,14 @@ pub(crate) async fn sync(
match outcome {
Outcome::Success(..) => Ok(ExitStatus::Success),
- Outcome::LockMismatch(lock) => Err(ProjectError::LockMismatch(lock).into()),
+ Outcome::LockMismatch(prev, cur) => {
+ writeln!(
+ printer.stderr(),
+ "{}",
+ ProjectError::LockMismatch(prev, cur).to_string().bold()
+ )?;
+ Ok(ExitStatus::Failure)
+ }
}
}
@@ -409,15 +425,18 @@ enum Outcome {
/// The `lock` operation was successful.
Success(LockResult),
/// The `lock` operation successfully resolved, but failed due to a mismatch (e.g., with `--locked`).
- LockMismatch(Box<Lock>),
+ LockMismatch(Option<Box<Lock>>, Box<Lock>),
}
impl Outcome {
/// Return the [`Lock`] associated with this outcome.
fn lock(&self) -> &Lock {
match self {
- Self::Success(lock) => lock.lock(),
- Self::LockMismatch(lock) => lock,
+ Self::Success(lock) => match lock {
+ LockResult::Changed(_, lock) => lock,
+ LockResult::Unchanged(lock) => lock,
+ },
+ Self::LockMismatch(_prev, cur) => cur,
}
}
}
@@ -1179,7 +1198,7 @@ impl From<(&LockTarget<'_>, &LockMode<'_>, &Outcome)> for LockReport {
}
}
// TODO(zanieb): We don't have a way to report the outcome of the lock yet
- Outcome::LockMismatch(_) => LockAction::Check,
+ Outcome::LockMismatch(..) => LockAction::Check,
},
dry_run: matches!(mode, LockMode::DryRun(_)),
}
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index d7ac9b47a..faf37a83a 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -6660,15 +6660,15 @@ fn lock_invalid_hash() -> Result<()> {
"#)?;
// Re-run with `--locked`.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
@@ -11743,6 +11743,95 @@ fn unconditional_overlapping_marker_disjoint_version_constraints() -> Result<()>
Ok(())
}
+/// Checks the output of `uv lock --check` when there isn't a lock
+#[test]
+fn check_no_lock() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "myproject"
+ version = "0.1.0"
+ requires-python = ">=3.11"
+ dependencies = ["sortedcollections"]
+ "#,
+ )?;
+
+ uv_snapshot!(context.filters(), context.lock()
+ .arg("--check"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
+ ");
+ Ok(())
+}
+
+/// Checks the output of `uv lock --check` when the lock is outdated
+#[test]
+fn check_outdated_lock() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "myproject"
+ version = "0.1.0"
+ requires-python = ">=3.11"
+ dependencies = ["sortedcollections"]
+ "#,
+ )?;
+
+ // Generate the lock
+ uv_snapshot!(context.filters(), context.lock(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ ");
+
+ // Check that `--check` passes
+ uv_snapshot!(context.filters(), context.lock()
+ .arg("--check"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ ");
+
+ // Edit dependencies so the lock is invalid
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.11"
+ dependencies = ["iniconfig"]
+ "#,
+ )?;
+
+ uv_snapshot!(context.filters(), context.lock()
+ .arg("--check"), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
+ Ok(())
+}
+
/// This checks that markers that normalize to 'false', which are serialized
/// to the lockfile as `python_full_version < '0'`, get read back as false.
/// Otherwise `uv lock --check` will always fail.
@@ -12094,15 +12183,15 @@ fn lock_remove_member() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run without `--locked`.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -12239,15 +12328,15 @@ fn lock_add_member() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run with `--offline`. This should also fail, during the resolve phase.
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
@@ -12476,15 +12565,15 @@ fn lock_redundant_add_member() -> Result<()> {
// Re-run with `--locked`. This will fail, though in theory it could succeed, since the current
// _resolution_ satisfies the requirements, even if the inputs are not identical
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run without `--locked`.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -12674,15 +12763,15 @@ fn lock_new_constraints() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run without `--locked`.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -12883,16 +12972,16 @@ fn lock_remove_member_non_project() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run without `--locked`.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -13015,15 +13104,15 @@ fn lock_rename_project() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-run without `--locked`.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -14015,15 +14104,15 @@ fn lock_constrained_environment() -> Result<()> {
)?;
// Re-run with `--locked`. This should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 8 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -15278,15 +15367,15 @@ fn lock_add_empty_dependency_group() -> Result<()> {
)?;
// Re-run with `--locked`; this should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-lock the project.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -15360,15 +15449,15 @@ fn lock_add_empty_dependency_group() -> Result<()> {
)?;
// Re-run with `--locked`; this should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
// Re-lock the project.
uv_snapshot!(context.filters(), context.lock(), @r###"
@@ -23253,15 +23342,15 @@ fn lock_dynamic_to_static() -> Result<()> {
)?;
// Rerunning with `--locked` should fail, since the project is no longer dynamic.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -23384,15 +23473,15 @@ fn lock_static_to_dynamic() -> Result<()> {
.write_str("__version__ = '0.1.0'")?;
// Rerunning with `--locked` should fail, since the project is no longer static.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -23486,15 +23575,15 @@ fn lock_bump_static_version() -> Result<()> {
)?;
// Rerunning with `--locked` should fail.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -25302,15 +25391,15 @@ fn lock_script() -> Result<()> {
})?;
// Re-run with `--locked`.
- uv_snapshot!(context.filters(), context.lock().arg("--script").arg("script.py").arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--script").arg("script.py").arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
Ok(())
}
@@ -27631,15 +27720,15 @@ fn lock_empty_extra() -> Result<()> {
)?;
// Re-run with `--locked`. We expect this to fail, since we've added an extra.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -27667,15 +27756,15 @@ fn lock_empty_extra() -> Result<()> {
)?;
// Re-run with `--locked`. We expect this to fail, since we've added an extra.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
- "###);
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
@@ -28341,12 +28430,12 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()>
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
Ok(())
@@ -28432,12 +28521,12 @@ fn lock_trailing_slash_index_url_in_lockfile_not_pyproject() -> Result<()> {
// Run `uv lock --locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
Ok(())
@@ -28523,12 +28612,12 @@ fn lock_trailing_slash_index_url_in_pyproject_and_not_lockfile() -> Result<()> {
// Run `uv lock --locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
Ok(())
@@ -28714,12 +28803,12 @@ fn lock_trailing_slash_find_links() -> Result<()> {
// Re-run with `--locked`
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
uv_snapshot!(context.filters(), context.lock(), @r"
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 0165cc7f6..c225225b8 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -88,12 +88,12 @@ fn locked() -> Result<()> {
// Running with `--locked` should error.
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
let updated = context.read("uv.lock");
@@ -424,12 +424,12 @@ fn sync_json() -> Result<()> {
.arg("--locked")
.arg("--output-format").arg("json"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
Ok(())
@@ -894,7 +894,7 @@ fn check() -> Result<()> {
// Running `uv sync --check` should fail.
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
@@ -904,7 +904,7 @@ fn check() -> Result<()> {
Would download 1 package
Would install 1 package
+ iniconfig==2.0.0
- error: The environment is outdated; run `uv sync` to update the environment
+ The environment is outdated; run `uv sync` to update the environment
");
// Sync the environment.
@@ -8626,7 +8626,7 @@ fn sync_dry_run_and_locked() -> Result<()> {
// Running with `--locked` and `--dry-run` should error.
uv_snapshot!(context.filters(), context.sync().arg("--locked").arg("--dry-run"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
@@ -8635,7 +8635,7 @@ fn sync_dry_run_and_locked() -> Result<()> {
Would download 1 package
Would install 1 package
+ iniconfig==2.0.0
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
let updated = context.read("uv.lock");
@@ -8962,13 +8962,13 @@ fn sync_locked_script() -> Result<()> {
// Re-run with `--locked`.
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Using script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
Resolved 4 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
@@ -9064,14 +9064,14 @@ fn sync_locked_script() -> Result<()> {
// Re-run with `--locked`.
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH]
warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'`
Resolved 6 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r"
@@ -9944,12 +9944,12 @@ fn sync_build_constraints() -> Result<()> {
// This should fail, given that the build constraints have changed.
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
success: false
- exit_code: 2
+ exit_code: 1
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
- error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
");
// Changing the build constraints should lead to a re-resolve.
From 95c0b71f7709e3097b78ef39d9d13be26b4c7f2c Mon Sep 17 00:00:00 2001
From: Aria Desires
Date: Wed, 16 Jul 2025 09:24:05 -0400
Subject: [PATCH 068/130] Remove `uv version` fallback (#14161)
Fixes #14157
---------
Co-authored-by: John Mumm
---
crates/uv/src/commands/project/version.rs | 18 +---------
crates/uv/src/lib.rs | 10 ------
crates/uv/tests/it/version.rs | 43 ++++++++---------------
3 files changed, 16 insertions(+), 55 deletions(-)
diff --git a/crates/uv/src/commands/project/version.rs b/crates/uv/src/commands/project/version.rs
index ed1e9e246..efba226b9 100644
--- a/crates/uv/src/commands/project/version.rs
+++ b/crates/uv/src/commands/project/version.rs
@@ -19,7 +19,6 @@ use uv_pep440::{BumpCommand, PrereleaseKind, Version};
use uv_pep508::PackageName;
use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
use uv_settings::PythonInstallMirrors;
-use uv_warnings::warn_user;
use uv_workspace::pyproject_mut::Error;
use uv_workspace::{
DiscoveryOptions, WorkspaceCache,
@@ -58,7 +57,6 @@ pub(crate) async fn project_version(
mut bump: Vec,
short: bool,
output_format: VersionFormat,
- strict: bool,
project_dir: &Path,
package: Option<PackageName>,
dry_run: bool,
@@ -80,21 +78,7 @@ pub(crate) async fn project_version(
preview: PreviewMode,
) -> Result<ExitStatus> {
// Read the metadata
- let project = match find_target(project_dir, package.as_ref()).await {
- Ok(target) => target,
- Err(err) => {
- // If strict, hard bail on failing to find the pyproject.toml
- if strict {
- return Err(err)?;
- }
- // Otherwise, warn and provide fallback to the old `uv version` from before 0.7.0
- warn_user!(
- "Failed to read project metadata ({err}). Running `{}` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error.",
- "uv self version".green()
- );
- return self_version(short, output_format, printer);
- }
- };
+ let project = find_target(project_dir, package.as_ref()).await?;
let pyproject_path = project.root().join("pyproject.toml");
let Some(name) = project.project_name().cloned() else {
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 995738638..433f5afd3 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1062,7 +1062,6 @@ async fn run(mut cli: Cli) -> Result {
}
Commands::Project(project) => {
Box::pin(run_project(
- cli.top_level.global_args.project.is_some(),
project,
&project_dir,
run_command,
@@ -1663,7 +1662,6 @@ async fn run(mut cli: Cli) -> Result {
/// Run a [`ProjectCommand`].
async fn run_project(
- project_was_explicit: bool,
project_command: Box<ProjectCommand>,
project_dir: &Path,
command: Option<RunCommand>,
@@ -2055,19 +2053,11 @@ async fn run_project(
.combine(Refresh::from(args.settings.resolver.upgrade.clone())),
);
- // If they specified any of these flags, they probably don't mean `uv self version`
- let strict = project_was_explicit
- || globals.preview.is_enabled()
- || args.dry_run
- || !args.bump.is_empty()
- || args.value.is_some()
- || args.package.is_some();
Box::pin(commands::project_version(
args.value,
args.bump,
args.short,
args.output_format,
- strict,
project_dir,
args.package,
args.dry_run,
diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs
index 3c5e28e0f..78dd64252 100644
--- a/crates/uv/tests/it/version.rs
+++ b/crates/uv/tests/it/version.rs
@@ -1437,8 +1437,8 @@ fn version_set_dynamic() -> Result<()> {
Ok(())
}
-// Should fallback to `uv --version` if this pyproject.toml isn't usable for whatever reason
-// (In this case, because tool.uv.managed = false)
+/// Previously would fallback to `uv --version` if this pyproject.toml isn't usable for whatever reason
+/// (In this case, because tool.uv.managed = false)
#[test]
fn version_get_fallback_unmanaged() -> Result<()> {
let context = TestContext::new("3.12");
@@ -1456,13 +1456,12 @@ fn version_get_fallback_unmanaged() -> Result<()> {
)?;
uv_snapshot!(context.filters(), context.version(), @r"
- success: true
- exit_code: 0
+ success: false
+ exit_code: 2
----- stdout -----
- uv [VERSION] ([COMMIT] DATE)
----- stderr -----
- warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error.
+ error: The project is marked as unmanaged: `[TEMP_DIR]/`
");
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
@@ -1507,13 +1506,12 @@ fn version_get_fallback_unmanaged_short() -> Result<()> {
.collect::<Vec<_>>();
uv_snapshot!(filters, context.version()
.arg("--short"), @r"
- success: true
- exit_code: 0
+ success: false
+ exit_code: 2
----- stdout -----
- [VERSION] ([COMMIT] DATE)
----- stderr -----
- warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error.
+ error: The project is marked as unmanaged: `[TEMP_DIR]/`
");
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
@@ -1587,25 +1585,14 @@ fn version_get_fallback_unmanaged_json() -> Result<()> {
.collect::<Vec<_>>();
if git_version_info_expected() {
uv_snapshot!(filters, context.version()
- .arg("--output-format").arg("json"), @r#"
- success: true
- exit_code: 0
- ----- stdout -----
- {
- "package_name": "uv",
- "version": "[VERSION]",
- "commit_info": {
- "short_commit_hash": "[LONGHASH]",
- "commit_hash": "[LONGHASH]",
- "commit_date": "[DATE]",
- "last_tag": "[TAG]",
- "commits_since_last_tag": [COUNT]
- }
- }
+ .arg("--output-format").arg("json"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
- ----- stderr -----
- warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error.
- "#);
+ ----- stderr -----
+ error: The project is marked as unmanaged: `[TEMP_DIR]/`
+ ");
} else {
uv_snapshot!(filters, context.version()
.arg("--output-format").arg("json"), @r#"
From 3c9aea87b41f01b26775facd1e3d34828ef53b9f Mon Sep 17 00:00:00 2001
From: konsti
Date: Wed, 16 Jul 2025 19:07:08 +0100
Subject: [PATCH 069/130] `uv init`: Make `uv_build` the default build backend
(from `hatchling`) (#14661)
Closes https://github.com/astral-sh/uv/issues/14298
Switch the default build backend for `uv init` from `hatchling` to
`uv_build`.
This change affects the following two commands:
* `uv init --lib`
* `uv init [--app] --package`
It does not affect `uv init` or `uv init --app` without `--package`. `uv
init --build-backend <...>` also works as before.
**Before**
```
$ uv init --lib project
$ cat project/pyproject.toml
[project]
name = "project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
authors = [
{ name = "konstin", email = "konstin@mailbox.org" }
]
requires-python = ">=3.13.2"
dependencies = []
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
```
**After**
```
$ uv init --lib project
$ cat project/pyproject.toml
[project]
name = "project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
authors = [
{ name = "konstin", email = "konstin@mailbox.org" }
]
requires-python = ">=3.13.2"
dependencies = []
[build-system]
requires = ["uv_build>=0.7.20,<0.8"]
build-backend = "uv_build"
```
I cleaned up some tests for consistency in the second commit.
---
crates/uv-build-backend/src/metadata.rs | 14 +-
crates/uv/src/commands/project/init.rs | 21 +--
crates/uv/tests/it/build.rs | 17 +-
crates/uv/tests/it/build_backend.rs | 20 +-
crates/uv/tests/it/common/mod.rs | 8 +
crates/uv/tests/it/init.rs | 235 +++++++-----------------
docs/concepts/build-backend.md | 7 +-
7 files changed, 99 insertions(+), 223 deletions(-)
diff --git a/crates/uv-build-backend/src/metadata.rs b/crates/uv-build-backend/src/metadata.rs
index 00a207c7a..296c76a2b 100644
--- a/crates/uv-build-backend/src/metadata.rs
+++ b/crates/uv-build-backend/src/metadata.rs
@@ -171,7 +171,7 @@ impl PyProjectToml {
///
/// ```toml
/// [build-system]
- /// requires = ["uv_build>=0.4.15,<5"]
+ /// requires = ["uv_build>=0.4.15,<0.5"]
/// build-backend = "uv_build"
/// ```
pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {
@@ -826,7 +826,7 @@ mod tests {
{payload}
[build-system]
- requires = ["uv_build>=0.4.15,<5"]
+ requires = ["uv_build>=0.4.15,<0.5"]
build-backend = "uv_build"
"#
}
@@ -909,7 +909,7 @@ mod tests {
foo-bar = "foo:bar"
[build-system]
- requires = ["uv_build>=0.4.15,<5"]
+ requires = ["uv_build>=0.4.15,<0.5"]
build-backend = "uv_build"
"#
};
@@ -1036,7 +1036,7 @@ mod tests {
foo-bar = "foo:bar"
[build-system]
- requires = ["uv_build>=0.4.15,<5"]
+ requires = ["uv_build>=0.4.15,<0.5"]
build-backend = "uv_build"
"#
};
@@ -1104,7 +1104,7 @@ mod tests {
let contents = extend_project("");
let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
assert_snapshot!(
- pyproject_toml.check_build_system("1.0.0+test").join("\n"),
+ pyproject_toml.check_build_system("0.4.15+test").join("\n"),
@""
);
}
@@ -1135,7 +1135,7 @@ mod tests {
version = "0.1.0"
[build-system]
- requires = ["uv_build>=0.4.15,<5", "wheel"]
+ requires = ["uv_build>=0.4.15,<0.5", "wheel"]
build-backend = "uv_build"
"#};
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
@@ -1171,7 +1171,7 @@ mod tests {
version = "0.1.0"
[build-system]
- requires = ["uv_build>=0.4.15,<5"]
+ requires = ["uv_build>=0.4.15,<0.5"]
build-backend = "setuptools"
"#};
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs
index 9ff321a72..4fd79b1c2 100644
--- a/crates/uv/src/commands/project/init.rs
+++ b/crates/uv/src/commands/project/init.rs
@@ -63,9 +63,6 @@ pub(crate) async fn init(
printer: Printer,
preview: PreviewMode,
) -> Result {
- if build_backend == Some(ProjectBuildBackend::Uv) && preview.is_disabled() {
- warn_user_once!("The uv build backend is experimental and may change without warning");
- }
match init_kind {
InitKind::Script => {
let Some(path) = explicit_path.as_deref() else {
@@ -596,7 +593,6 @@ async fn init_project(
author_from,
no_readme,
package,
- preview,
)?;
if let Some(workspace) = workspace {
@@ -724,7 +720,6 @@ impl InitProjectKind {
author_from: Option<AuthorFrom>,
no_readme: bool,
package: bool,
- preview: PreviewMode,
) -> Result<()> {
match self {
InitProjectKind::Application => InitProjectKind::init_application(
@@ -739,7 +734,6 @@ impl InitProjectKind {
author_from,
no_readme,
package,
- preview,
),
InitProjectKind::Library => InitProjectKind::init_library(
name,
@@ -753,7 +747,6 @@ impl InitProjectKind {
author_from,
no_readme,
package,
- preview,
),
}
}
@@ -772,7 +765,6 @@ impl InitProjectKind {
author_from: Option<AuthorFrom>,
no_readme: bool,
package: bool,
- preview: PreviewMode,
) -> Result<()> {
fs_err::create_dir_all(path)?;
@@ -805,11 +797,7 @@ impl InitProjectKind {
}
// Add a build system
- let build_backend = match build_backend {
- Some(build_backend) => build_backend,
- None if preview.is_enabled() => ProjectBuildBackend::Uv,
- None => ProjectBuildBackend::Hatch,
- };
+ let build_backend = build_backend.unwrap_or(ProjectBuildBackend::Uv);
pyproject.push('\n');
pyproject.push_str(&pyproject_build_system(name, build_backend));
pyproject_build_backend_prerequisites(name, path, build_backend)?;
@@ -859,7 +847,6 @@ impl InitProjectKind {
author_from: Option,
no_readme: bool,
package: bool,
- preview: PreviewMode,
) -> Result<()> {
if !package {
return Err(anyhow!("Library projects must be packaged"));
@@ -880,11 +867,7 @@ impl InitProjectKind {
);
// Always include a build system if the project is packaged.
- let build_backend = match build_backend {
- Some(build_backend) => build_backend,
- None if preview.is_enabled() => ProjectBuildBackend::Uv,
- None => ProjectBuildBackend::Hatch,
- };
+ let build_backend = build_backend.unwrap_or(ProjectBuildBackend::Uv);
pyproject.push('\n');
pyproject.push_str(&pyproject_build_system(name, build_backend));
pyproject_build_backend_prerequisites(name, path, build_backend)?;
diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs
index 3d08a90d4..656c68d3f 100644
--- a/crates/uv/tests/it/build.rs
+++ b/crates/uv/tests/it/build.rs
@@ -1439,7 +1439,6 @@ fn build_fast_path() -> Result<()> {
let built_by_uv = current_dir()?.join("../../scripts/packages/built-by-uv");
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output1")), @r###"
@@ -1465,7 +1464,6 @@ fn build_fast_path() -> Result<()> {
.assert(predicate::path::is_file());
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output2"))
@@ -1485,7 +1483,6 @@ fn build_fast_path() -> Result<()> {
.assert(predicate::path::is_file());
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output3"))
@@ -1505,7 +1502,6 @@ fn build_fast_path() -> Result<()> {
.assert(predicate::path::is_file());
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output4"))
@@ -1545,7 +1541,6 @@ fn build_list_files() -> Result<()> {
// By default, we build the wheel from the source dist, which we need to do even for the list
// task.
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output1"))
@@ -1601,7 +1596,6 @@ fn build_list_files() -> Result<()> {
.assert(predicate::path::missing());
uv_snapshot!(context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output2"))
@@ -1670,7 +1664,6 @@ fn build_list_files_errors() -> Result<()> {
// In CI, we run with link mode settings.
filters.push(("--link-mode ", ""));
uv_snapshot!(filters, context.build()
- .arg("--preview")
.arg(&built_by_uv)
.arg("--out-dir")
.arg(context.temp_dir.join("output1"))
@@ -1694,7 +1687,6 @@ fn build_list_files_errors() -> Result<()> {
// Windows normalization
filters.push(("/crates/uv/../../", "/"));
uv_snapshot!(filters, context.build()
- .arg("--preview")
.arg(&anyio_local)
.arg("--out-dir")
.arg(context.temp_dir.join("output2"))
@@ -1987,12 +1979,7 @@ fn force_pep517() -> Result<()> {
// We need to use a real `uv_build` package.
let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z");
- context
- .init()
- .arg("--build-backend")
- .arg("uv")
- .assert()
- .success();
+ context.init().assert().success();
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
@@ -2026,7 +2013,7 @@ fn force_pep517() -> Result<()> {
----- stderr -----
Building source distribution...
- Error: Missing module directory for `does_not_exist` in `src`. Found: `temp`
+ Error: Missing source directory at: `src`
× Failed to build `[TEMP_DIR]/`
├─▶ The build backend returned an error
╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1)
diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs
index b3bd337ae..ae3a7a740 100644
--- a/crates/uv/tests/it/build_backend.rs
+++ b/crates/uv/tests/it/build_backend.rs
@@ -222,8 +222,7 @@ fn preserve_executable_bit() -> Result<()> {
let project_dir = context.temp_dir.path().join("preserve_executable_bit");
context
.init()
- .arg("--build-backend")
- .arg("uv")
+ .arg("--lib")
.arg(&project_dir)
.assert()
.success();
@@ -296,7 +295,7 @@ fn rename_module() -> Result<()> {
module-name = "bar"
[build-system]
- requires = ["uv_build>=0.5,<0.8"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
@@ -377,7 +376,7 @@ fn rename_module_editable_build() -> Result<()> {
module-name = "bar"
[build-system]
- requires = ["uv_build>=0.5,<0.8"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
@@ -436,7 +435,7 @@ fn build_module_name_normalization() -> Result<()> {
version = "1.0.0"
[build-system]
- requires = ["uv_build>=0.5,<0.8"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
[tool.uv.build-backend]
@@ -548,7 +547,7 @@ fn build_sdist_with_long_path() -> Result<()> {
version = "1.0.0"
[build-system]
- requires = ["uv_build>=0.7,<0.8"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
context
@@ -591,7 +590,7 @@ fn sdist_error_without_module() -> Result<()> {
version = "1.0.0"
[build-system]
- requires = ["uv_build>=0.7,<0.8"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
@@ -661,7 +660,7 @@ fn complex_namespace_packages() -> Result<()> {
module-name = "{project_name_dist_info}.{part_name}"
[build-system]
- requires = ["uv_build>=0.5.15,<10000"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
};
@@ -770,8 +769,7 @@ fn symlinked_file() -> Result<()> {
let project = context.temp_dir.child("project");
context
.init()
- .arg("--build-backend")
- .arg("uv")
+ .arg("--lib")
.arg(project.path())
.assert()
.success();
@@ -783,7 +781,7 @@ fn symlinked_file() -> Result<()> {
license-files = ["LICENSE"]
[build-system]
- requires = ["uv_build>=0.5.15,<10000"]
+ requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
})?;
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index d4a73f953..bc6e65f4e 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -664,6 +664,14 @@ impl TestContext {
));
// For wiremock tests
filters.push((r"127\.0\.0\.1:\d*".to_string(), "[LOCALHOST]".to_string()));
+ // Avoid breaking the tests when bumping the uv version
+ filters.push((
+ format!(
+ r#"requires = \["uv_build>={},<[0-9.]+"\]"#,
+ uv_version::version()
+ ),
+ r#"requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]"#.to_string(),
+ ));
Self {
root: ChildPath::new(root.path()),
diff --git a/crates/uv/tests/it/init.rs b/crates/uv/tests/it/init.rs
index c5993d670..3f374eada 100644
--- a/crates/uv/tests/it/init.rs
+++ b/crates/uv/tests/it/init.rs
@@ -314,7 +314,7 @@ fn init_application_package() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -327,9 +327,9 @@ fn init_application_package() -> Result<()> {
foo = "foo:main"
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -390,7 +390,7 @@ fn init_library() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -400,9 +400,9 @@ fn init_library() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -446,91 +446,6 @@ fn init_library() -> Result<()> {
Ok(())
}
-/// Test the uv build backend with using `uv init --lib --preview`. To be merged with the regular
-/// init lib test once the uv build backend becomes the stable default.
-#[test]
-fn init_library_preview() -> Result<()> {
- let context = TestContext::new("3.12");
-
- let child = context.temp_dir.child("foo");
- child.create_dir_all()?;
-
- let pyproject_toml = child.join("pyproject.toml");
- let init_py = child.join("src").join("foo").join("__init__.py");
- let py_typed = child.join("src").join("foo").join("py.typed");
-
- uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--lib").arg("--preview"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Initialized project `foo`
- "###);
-
- let pyproject = fs_err::read_to_string(&pyproject_toml)?;
- let mut filters = context.filters();
- filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#));
- insta::with_settings!({
- filters => filters,
- }, {
- assert_snapshot!(
- pyproject, @r#"
- [project]
- name = "foo"
- version = "0.1.0"
- description = "Add your description here"
- readme = "README.md"
- requires-python = ">=3.12"
- dependencies = []
-
- [build-system]
- requires = ["uv_build[SPECIFIERS]"]
- build-backend = "uv_build"
- "#
- );
- });
-
- let init = fs_err::read_to_string(init_py)?;
- insta::with_settings!({
- filters => context.filters(),
- }, {
- assert_snapshot!(
- init, @r###"
- def hello() -> str:
- return "Hello from foo!"
- "###
- );
- });
-
- let py_typed = fs_err::read_to_string(py_typed)?;
- insta::with_settings!({
- filters => context.filters(),
- }, {
- assert_snapshot!(
- py_typed, @""
- );
- });
-
- uv_snapshot!(context.filters(), context.run().arg("--preview").current_dir(&child).arg("python").arg("-c").arg("import foo; print(foo.hello())"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
- Hello from foo!
-
- ----- stderr -----
- warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
- Creating virtual environment at: .venv
- Resolved 1 package in [TIME]
- Prepared 1 package in [TIME]
- Installed 1 package in [TIME]
- + foo==0.1.0 (from file://[TEMP_DIR]/foo)
- "###);
-
- Ok(())
-}
-
/// Test the uv build backend with using `uv init --package --preview`. To be merged with the regular
/// init lib test once the uv build backend becomes the stable default.
#[test]
@@ -550,10 +465,8 @@ fn init_package_preview() -> Result<()> {
"###);
let pyproject = fs_err::read_to_string(child.join("pyproject.toml"))?;
- let mut filters = context.filters();
- filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#));
insta::with_settings!({
- filters => filters,
+ filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
@@ -569,7 +482,7 @@ fn init_package_preview() -> Result<()> {
foo = "foo:main"
[build-system]
- requires = ["uv_build[SPECIFIERS]"]
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
@@ -615,7 +528,7 @@ fn init_bare_lib() {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -623,9 +536,9 @@ fn init_bare_lib() {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
}
@@ -667,7 +580,7 @@ fn init_bare_package() {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -675,9 +588,9 @@ fn init_bare_package() {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
}
@@ -1154,7 +1067,7 @@ fn init_library_current_dir() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -1164,9 +1077,9 @@ fn init_library_current_dir() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -1283,7 +1196,7 @@ fn init_dot_args() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -1293,9 +1206,9 @@ fn init_dot_args() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -1361,7 +1274,7 @@ fn init_workspace() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -1371,9 +1284,9 @@ fn init_workspace() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -1546,7 +1459,7 @@ fn init_workspace_relative_sub_package() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -1556,9 +1469,9 @@ fn init_workspace_relative_sub_package() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -1643,7 +1556,7 @@ fn init_workspace_outside() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
@@ -1653,9 +1566,9 @@ fn init_workspace_outside() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -1725,7 +1638,7 @@ fn init_normalized_names() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject, @r###"
+ pyproject, @r#"
[project]
name = "foo-bar"
version = "0.1.0"
@@ -1735,9 +1648,9 @@ fn init_normalized_names() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
- "###
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
+ "#
);
});
@@ -3008,8 +2921,8 @@ fn init_with_author() {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
"#
);
});
@@ -3038,8 +2951,8 @@ fn init_with_author() {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
"#
);
});
@@ -3822,9 +3735,9 @@ fn init_lib_build_backend_scikit() -> Result<()> {
Ok(())
}
-/// Run `uv init --app --package --build-backend uv` to create a packaged application project
+/// Run `uv init --app --package --build-backend hatchling` to create a packaged application project
#[test]
-fn init_application_package_uv() -> Result<()> {
+fn init_application_package_hatchling() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
@@ -3833,41 +3746,34 @@ fn init_application_package_uv() -> Result<()> {
let pyproject_toml = child.join("pyproject.toml");
let init_py = child.join("src").join("foo").join("__init__.py");
- uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app").arg("--package").arg("--build-backend").arg("uv"), @r###"
+ uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app").arg("--package").arg("--build-backend").arg("hatchling"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- warning: The uv build backend is experimental and may change without warning
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
- let mut filters = context.filters();
- filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#));
- insta::with_settings!({
- filters => filters,
- }, {
- assert_snapshot!(
- pyproject, @r###"
- [project]
- name = "foo"
- version = "0.1.0"
- description = "Add your description here"
- readme = "README.md"
- requires-python = ">=3.12"
- dependencies = []
+ assert_snapshot!(
+ pyproject, @r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ description = "Add your description here"
+ readme = "README.md"
+ requires-python = ">=3.12"
+ dependencies = []
- [project.scripts]
- foo = "foo:main"
+ [project.scripts]
+ foo = "foo:main"
- [build-system]
- requires = ["uv_build[SPECIFIERS]"]
- build-backend = "uv_build"
- "###
- );
- });
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ "#
+ );
let init = fs_err::read_to_string(init_py)?;
insta::with_settings!({
@@ -3881,8 +3787,7 @@ fn init_application_package_uv() -> Result<()> {
);
});
- // Use preview to go through the fast path.
- uv_snapshot!(context.filters(), context.run().arg("--preview").arg("foo").current_dir(&child).env_remove(EnvVars::VIRTUAL_ENV), @r###"
+ uv_snapshot!(context.filters(), context.run().arg("foo").current_dir(&child).env_remove(EnvVars::VIRTUAL_ENV), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -3935,8 +3840,8 @@ fn init_with_description() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
"#
);
});
@@ -3977,8 +3882,8 @@ fn init_without_description() -> Result<()> {
dependencies = []
[build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
+ requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
+ build-backend = "uv_build"
"#
);
});
diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md
index 5f52463bf..d2edf1bad 100644
--- a/docs/concepts/build-backend.md
+++ b/docs/concepts/build-backend.md
@@ -1,10 +1,5 @@
# The uv build backend
-!!! note
-
- Currently, the default build backend for `uv init` is
- [hatchling](https://pypi.org/project/hatchling/). This will change to `uv` in a future version.
-
A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel.
uv supports all build backends (as specified by [PEP 517](https://peps.python.org/pep-0517/)), but
@@ -49,7 +44,7 @@ build-backend = "uv_build"
To create a new project that uses the uv build backend, use `uv init`:
```console
-$ uv init --build-backend uv
+$ uv init
```
When the project is built, e.g., with [`uv build`](../guides/package.md), the uv build backend will
From 25e69458b1f6bf8aa937bb0d83b660e8d5c088e7 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 14:26:42 -0500
Subject: [PATCH 070/130] Stabilize addition of Python versions to the Windows
registry (#14625)
Following #14614 this is non-fatal and has an opt-out so it should be
safe to stabilize.
---
crates/uv/src/commands/python/install.rs | 2 +-
crates/uv/src/commands/python/uninstall.rs | 2 +-
docs/concepts/python-versions.md | 15 +++++++++++++++
3 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index feb0cf7c7..b9d4660df 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -501,7 +501,7 @@ pub(crate) async fn install(
);
}
- if preview.is_enabled() && !matches!(registry, Some(false)) {
+ if !matches!(registry, Some(false)) {
#[cfg(windows)]
{
match uv_python::windows_registry::create_registry_entry(installation) {
diff --git a/crates/uv/src/commands/python/uninstall.rs b/crates/uv/src/commands/python/uninstall.rs
index 642942d07..dd306fc4d 100644
--- a/crates/uv/src/commands/python/uninstall.rs
+++ b/crates/uv/src/commands/python/uninstall.rs
@@ -211,7 +211,7 @@ async fn do_uninstall(
}
#[cfg(windows)]
- if preview.is_enabled() {
+ {
uv_python::windows_registry::remove_registry_entry(
&matching_installations,
all,
diff --git a/docs/concepts/python-versions.md b/docs/concepts/python-versions.md
index a7472bea8..ee18fa9da 100644
--- a/docs/concepts/python-versions.md
+++ b/docs/concepts/python-versions.md
@@ -435,3 +435,18 @@ are not yet available for musl Linux on ARM).
### PyPy distributions
PyPy distributions are provided by the PyPy project.
+
+## Registration in the Windows registry
+
+On Windows, installation of managed Python versions will register them with the Windows registry as
+defined by [PEP 514](https://peps.python.org/pep-0514/).
+
+After installation, the Python versions can be selected with the `py` launcher, e.g.:
+
+```console
+$ uv python install 3.13.1
+$ py -V:Astral/CPython3.13.1
+```
+
+On uninstall, uv will remove the registry entry for the target version as well as any broken
+registry entries.
From 2df06ebfbc2d4df90d47ef9a9e631ba9926712e0 Mon Sep 17 00:00:00 2001
From: John Mumm
Date: Wed, 16 Jul 2025 21:25:48 +0200
Subject: [PATCH 071/130] Require `uv venv --clear` before removing an existing
directory (#14309)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
By default, `uv venv <path>` currently removes the `<path>`
directory if it exists. This can be surprising behavior: not everyone
expects an existing environment to be overwritten. This PR updates the
default to fail if a non-empty `<path>` directory already exists
and neither `--allow-existing` nor the new `-c/--clear` option is
provided (if a TTY is detected, it prompts first). If it's not a TTY,
then uv will only warn and not fail for now — we'll make this an error
in the future. I've also added a corresponding `UV_VENV_CLEAR` env var.
I've chosen to use `--clear` instead of `--force` for this option
because it is used by the `venv` module and `virtualenv` and will be
familiar to users. I also think its meaning is clearer in this context
than `--force` (which could plausibly mean force overwrite just the
virtual environment files, which is what our current `--allow-existing`
option does).
Closes #1472.
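
A minimal standalone sketch of how the new flags resolve, mirroring the `OnExisting` enum and
`from_args` helper added in this patch (the free function and the assertions are illustrative only):

```rust
/// Mirrors the `OnExisting` enum and `from_args` logic added in
/// `crates/uv-virtualenv/src/virtualenv.rs` below.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
enum OnExisting {
    /// Fail (prompt on a TTY, warn otherwise, for now) if the directory is non-empty.
    #[default]
    Fail,
    /// Overwrite virtual environment files, retaining other files (`--allow-existing`).
    Allow,
    /// Remove the existing directory first (`--clear` / `UV_VENV_CLEAR=1`).
    Remove,
}

fn from_args(allow_existing: bool, clear: bool) -> OnExisting {
    if allow_existing {
        OnExisting::Allow
    } else if clear {
        OnExisting::Remove
    } else {
        OnExisting::default()
    }
}

fn main() {
    assert_eq!(from_args(false, false), OnExisting::Fail); // plain `uv venv`
    assert_eq!(from_args(false, true), OnExisting::Remove); // `uv venv --clear`
    assert_eq!(from_args(true, false), OnExisting::Allow); // `uv venv --allow-existing`
}
```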
---------
Co-authored-by: Zanie Blue
---
.github/workflows/ci.yml | 10 +-
Cargo.lock | 4 +
crates/uv-build-frontend/src/lib.rs | 2 +-
crates/uv-cli/src/compat.rs | 9 -
crates/uv-cli/src/lib.rs | 13 +-
crates/uv-console/src/lib.rs | 32 +++-
crates/uv-static/src/env_vars.rs | 4 +
crates/uv-tool/src/lib.rs | 2 +-
crates/uv-virtualenv/Cargo.toml | 4 +
crates/uv-virtualenv/src/lib.rs | 6 +-
crates/uv-virtualenv/src/virtualenv.rs | 169 ++++++++++++++----
crates/uv/src/commands/project/environment.rs | 16 +-
crates/uv/src/commands/project/mod.rs | 8 +-
crates/uv/src/commands/project/run.rs | 8 +-
crates/uv/src/commands/venv.rs | 5 +-
crates/uv/src/lib.rs | 4 +-
crates/uv/src/settings.rs | 3 +
crates/uv/tests/it/cache_prune.rs | 2 +-
crates/uv/tests/it/common/mod.rs | 1 +
crates/uv/tests/it/pip_install.rs | 7 +-
crates/uv/tests/it/pip_sync.rs | 2 +-
crates/uv/tests/it/sync.rs | 1 +
crates/uv/tests/it/venv.rs | 94 +++++-----
docs/reference/cli.md | 6 +-
docs/reference/environment.md | 5 +
25 files changed, 282 insertions(+), 135 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e9beddcc5..4fb67346e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1042,7 +1042,7 @@ jobs:
- name: "Create a virtual environment (uv)"
run: |
- ./uv venv -p 3.13t --managed-python
+ ./uv venv -c -p 3.13t --managed-python
- name: "Check version (uv)"
run: |
@@ -1087,7 +1087,7 @@ jobs:
- name: "Create a virtual environment (uv)"
run: |
- ./uv venv -p 3.13 --managed-python
+ ./uv venv -c -p 3.13 --managed-python
- name: "Check version (uv)"
run: |
@@ -1132,7 +1132,7 @@ jobs:
- name: "Create a virtual environment (uv)"
run: |
- ./uv venv -p 3.13 --managed-python
+ ./uv venv -c -p 3.13 --managed-python
- name: "Check version (uv)"
run: |
@@ -1758,14 +1758,14 @@ jobs:
./uv run --no-project python -c "from built_by_uv import greet; print(greet())"
# Test both `build_wheel` and `build_sdist` through uv
- ./uv venv -v
+ ./uv venv -c -v
./uv build -v --force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline
./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
./uv run --no-project python -c "from built_by_uv import greet; print(greet())"
# Test both `build_wheel` and `build_sdist` through the official `build`
rm -rf scripts/packages/built-by-uv/dist/
- ./uv venv -v
+ ./uv venv -c -v
./uv pip install build
# Add the uv binary to PATH for `build` to find
PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv
diff --git a/Cargo.lock b/Cargo.lock
index 2963b6374..0900699cb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5998,18 +5998,22 @@ version = "0.7.22"
name = "uv-virtualenv"
version = "0.0.4"
dependencies = [
+ "console 0.15.11",
"fs-err 3.1.1",
"itertools 0.14.0",
+ "owo-colors",
"pathdiff",
"self-replace",
"thiserror 2.0.12",
"tracing",
"uv-configuration",
+ "uv-console",
"uv-fs",
"uv-pypi-types",
"uv-python",
"uv-shell",
"uv-version",
+ "uv-warnings",
]
[[package]]
diff --git a/crates/uv-build-frontend/src/lib.rs b/crates/uv-build-frontend/src/lib.rs
index 5cbaece2e..67bee9619 100644
--- a/crates/uv-build-frontend/src/lib.rs
+++ b/crates/uv-build-frontend/src/lib.rs
@@ -331,7 +331,7 @@ impl SourceBuild {
interpreter.clone(),
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
false,
diff --git a/crates/uv-cli/src/compat.rs b/crates/uv-cli/src/compat.rs
index d29afa760..344d1a4e7 100644
--- a/crates/uv-cli/src/compat.rs
+++ b/crates/uv-cli/src/compat.rs
@@ -266,9 +266,6 @@ enum Resolver {
/// These represent a subset of the `virtualenv` interface that uv supports by default.
#[derive(Args)]
pub struct VenvCompatArgs {
- #[clap(long, hide = true)]
- clear: bool,
-
#[clap(long, hide = true)]
no_seed: bool,
@@ -289,12 +286,6 @@ impl CompatArgs for VenvCompatArgs {
/// behavior. If an argument is passed that does _not_ match uv's behavior, this method will
/// return an error.
fn validate(&self) -> Result<()> {
- if self.clear {
- warn_user!(
- "virtualenv's `--clear` has no effect (uv always clears the virtual environment)"
- );
- }
-
if self.no_seed {
warn_user!(
"virtualenv's `--no-seed` has no effect (uv omits seed packages by default)"
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 4c01fd780..5df818654 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -2615,16 +2615,23 @@ pub struct VenvArgs {
#[arg(long, value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_SEED)]
pub seed: bool,
+ /// Remove any existing files or directories at the target path.
+ ///
+ /// By default, `uv venv` will exit with an error if the given path is non-empty. The
+ /// `--clear` option will instead clear a non-empty path before creating a new virtual
+ /// environment.
+ #[clap(long, short, overrides_with = "allow_existing", value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_CLEAR)]
+ pub clear: bool,
+
/// Preserve any existing files or directories at the target path.
///
- /// By default, `uv venv` will remove an existing virtual environment at the given path, and
- /// exit with an error if the path is non-empty but _not_ a virtual environment. The
+ /// By default, `uv venv` will exit with an error if the given path is non-empty. The
/// `--allow-existing` option will instead write to the given path, regardless of its contents,
/// and without clearing it beforehand.
///
/// WARNING: This option can lead to unexpected behavior if the existing virtual environment and
/// the newly-created virtual environment are linked to different Python interpreters.
- #[clap(long)]
+ #[clap(long, overrides_with = "clear")]
pub allow_existing: bool,
/// The path to the virtual environment to create.
diff --git a/crates/uv-console/src/lib.rs b/crates/uv-console/src/lib.rs
index 807b77aa4..24c5eea16 100644
--- a/crates/uv-console/src/lib.rs
+++ b/crates/uv-console/src/lib.rs
@@ -6,6 +6,25 @@ use std::{cmp::Ordering, iter};
/// This is a slimmed-down version of `dialoguer::Confirm`, with the post-confirmation report
/// enabled.
pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<bool> {
+ confirm_inner(message, None, term, default)
+}
+
+/// Prompt the user for confirmation in the given [`Term`], with a hint.
+pub fn confirm_with_hint(
+ message: &str,
+ hint: &str,
+ term: &Term,
+ default: bool,
+) -> std::io::Result<bool> {
+ confirm_inner(message, Some(hint), term, default)
+}
+
+fn confirm_inner(
+ message: &str,
+ hint: Option<&str>,
+ term: &Term,
+ default: bool,
+) -> std::io::Result<bool> {
let prompt = format!(
"{} {} {} {} {}",
style("?".to_string()).for_stderr().yellow(),
@@ -18,6 +37,13 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<bool> {
- if metadata.is_file() {
- return Err(Error::Io(io::Error::new(
- io::ErrorKind::AlreadyExists,
- format!("File exists at `{}`", location.user_display()),
- )));
- } else if metadata.is_dir() {
- if allow_existing {
- debug!("Allowing existing directory");
- } else if uv_fs::is_virtualenv_base(location) {
- debug!("Removing existing directory");
-
- // On Windows, if the current executable is in the directory, guard against
- // self-deletion.
- #[cfg(windows)]
- if let Ok(itself) = std::env::current_exe() {
- let target = std::path::absolute(location)?;
- if itself.starts_with(&target) {
- debug!("Detected self-delete of executable: {}", itself.display());
- self_replace::self_delete_outside_path(location)?;
- }
- }
-
- fs::remove_dir_all(location)?;
- fs::create_dir_all(location)?;
- } else if location
- .read_dir()
- .is_ok_and(|mut dir| dir.next().is_none())
+ Ok(metadata) if metadata.is_file() => {
+ return Err(Error::Io(io::Error::new(
+ io::ErrorKind::AlreadyExists,
+ format!("File exists at `{}`", location.user_display()),
+ )));
+ }
+ Ok(metadata) if metadata.is_dir() => {
+ let name = if uv_fs::is_virtualenv_base(location) {
+ "virtual environment"
+ } else {
+ "directory"
+ };
+ match on_existing {
+ OnExisting::Allow => {
+ debug!("Allowing existing {name} due to `--allow-existing`");
+ }
+ OnExisting::Remove => {
+ debug!("Removing existing {name} due to `--clear`");
+ remove_venv_directory(location)?;
+ }
+ OnExisting::Fail
+ if location
+ .read_dir()
+ .is_ok_and(|mut dir| dir.next().is_none()) =>
{
debug!("Ignoring empty directory");
- } else {
- return Err(Error::Io(io::Error::new(
- io::ErrorKind::AlreadyExists,
- format!(
- "The directory `{}` exists, but it's not a virtual environment",
- location.user_display()
- ),
- )));
+ }
+ OnExisting::Fail => {
+ match confirm_clear(location, name)? {
+ Some(true) => {
+ debug!("Removing existing {name} due to confirmation");
+ remove_venv_directory(location)?;
+ }
+ Some(false) => {
+ let hint = format!(
+ "Use the `{}` flag or set `{}` to replace the existing {name}",
+ "--clear".green(),
+ "UV_VENV_CLEAR=1".green()
+ );
+ return Err(Error::Io(io::Error::new(
+ io::ErrorKind::AlreadyExists,
+ format!(
+ "A {name} already exists at: {}\n\n{}{} {hint}",
+ location.user_display(),
+ "hint".bold().cyan(),
+ ":".bold(),
+ ),
+ )));
+ }
+ // When we don't have a TTY, warn that the behavior will change in the future
+ None => {
+ warn_user_once!(
+ "A {name} already exists at `{}`. In the future, uv will require `{}` to replace it",
+ location.user_display(),
+ "--clear".green(),
+ );
+ }
+ }
}
}
}
+ Ok(_) => {
+ // It's not a file or a directory
+ return Err(Error::Io(io::Error::new(
+ io::ErrorKind::AlreadyExists,
+ format!("Object already exists at `{}`", location.user_display()),
+ )));
+ }
Err(err) if err.kind() == io::ErrorKind::NotFound => {
fs::create_dir_all(location)?;
}
@@ -464,6 +494,71 @@ pub(crate) fn create(
})
}
+/// Prompt a confirmation that the virtual environment should be cleared.
+///
+/// If not a TTY, returns `None`.
+fn confirm_clear(location: &Path, name: &'static str) -> Result<Option<bool>, io::Error> {
+ let term = Term::stderr();
+ if term.is_term() {
+ let prompt = format!(
+ "A {name} already exists at `{}`. Do you want to replace it?",
+ location.user_display(),
+ );
+ let hint = format!(
+ "Use the `{}` flag or set `{}` to skip this prompt",
+ "--clear".green(),
+ "UV_VENV_CLEAR=1".green()
+ );
+ Ok(Some(uv_console::confirm_with_hint(
+ &prompt, &hint, &term, true,
+ )?))
+ } else {
+ Ok(None)
+ }
+}
+
+fn remove_venv_directory(location: &Path) -> Result<(), Error> {
+ // On Windows, if the current executable is in the directory, guard against
+ // self-deletion.
+ #[cfg(windows)]
+ if let Ok(itself) = std::env::current_exe() {
+ let target = std::path::absolute(location)?;
+ if itself.starts_with(&target) {
+ debug!("Detected self-delete of executable: {}", itself.display());
+ self_replace::self_delete_outside_path(location)?;
+ }
+ }
+
+ fs::remove_dir_all(location)?;
+ fs::create_dir_all(location)?;
+
+ Ok(())
+}
+
+#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
+pub enum OnExisting {
+ /// Fail if the directory already exists and is non-empty.
+ #[default]
+ Fail,
+ /// Allow an existing directory, overwriting virtual environment files while retaining other
+ /// files in the directory.
+ Allow,
+ /// Remove an existing directory.
+ Remove,
+}
+
+impl OnExisting {
+ pub fn from_args(allow_existing: bool, clear: bool) -> Self {
+ if allow_existing {
+ OnExisting::Allow
+ } else if clear {
+ OnExisting::Remove
+ } else {
+ OnExisting::default()
+ }
+ }
+}
+
#[derive(Debug, Copy, Clone)]
enum WindowsExecutable {
/// The `python.exe` executable (or `venvlauncher.exe` launcher shim).
diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs
index cf1add99a..4f9d936c5 100644
--- a/crates/uv/src/commands/project/environment.rs
+++ b/crates/uv/src/commands/project/environment.rs
@@ -2,13 +2,6 @@ use std::path::Path;
use tracing::debug;
-use uv_cache::{Cache, CacheBucket};
-use uv_cache_key::{cache_digest, hash_digest};
-use uv_configuration::{Concurrency, Constraints, PreviewMode};
-use uv_distribution_types::{Name, Resolution};
-use uv_fs::PythonExt;
-use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable};
-
use crate::commands::pip::loggers::{InstallLogger, ResolveLogger};
use crate::commands::pip::operations::Modifications;
use crate::commands::project::{
@@ -17,6 +10,13 @@ use crate::commands::project::{
use crate::printer::Printer;
use crate::settings::{NetworkSettings, ResolverInstallerSettings};
+use uv_cache::{Cache, CacheBucket};
+use uv_cache_key::{cache_digest, hash_digest};
+use uv_configuration::{Concurrency, Constraints, PreviewMode};
+use uv_distribution_types::{Name, Resolution};
+use uv_fs::PythonExt;
+use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable};
+
/// An ephemeral [`PythonEnvironment`] for running an individual command.
#[derive(Debug)]
pub(crate) struct EphemeralEnvironment(PythonEnvironment);
@@ -171,7 +171,7 @@ impl CachedEnvironment {
interpreter,
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
true,
false,
false,
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index fde2b638c..23655c1ca 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -1336,7 +1336,7 @@ impl ProjectEnvironment {
interpreter,
prompt,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
upgradeable,
@@ -1375,7 +1375,7 @@ impl ProjectEnvironment {
interpreter,
prompt,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
upgradeable,
@@ -1527,7 +1527,7 @@ impl ScriptEnvironment {
interpreter,
prompt,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
upgradeable,
@@ -1563,7 +1563,7 @@ impl ScriptEnvironment {
interpreter,
prompt,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
upgradeable,
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs
index 16ebf88fb..ba8935013 100644
--- a/crates/uv/src/commands/project/run.rs
+++ b/crates/uv/src/commands/project/run.rs
@@ -465,7 +465,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
interpreter,
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
false,
@@ -670,7 +670,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
interpreter,
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
false,
@@ -907,7 +907,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
interpreter,
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
false,
@@ -1038,7 +1038,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
base_interpreter.clone(),
uv_virtualenv::Prompt::None,
false,
- false,
+ uv_virtualenv::OnExisting::Remove,
false,
false,
false,
diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs
index 02bc818f8..92eb1ead7 100644
--- a/crates/uv/src/commands/venv.rs
+++ b/crates/uv/src/commands/venv.rs
@@ -27,6 +27,7 @@ use uv_resolver::{ExcludeNewer, FlatIndex};
use uv_settings::PythonInstallMirrors;
use uv_shell::{Shell, shlex_posix, shlex_windows};
use uv_types::{AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, HashStrategy};
+use uv_virtualenv::OnExisting;
use uv_warnings::warn_user;
use uv_workspace::{DiscoveryOptions, VirtualProject, WorkspaceCache, WorkspaceError};
@@ -73,7 +74,7 @@ pub(crate) async fn venv(
prompt: uv_virtualenv::Prompt,
system_site_packages: bool,
seed: bool,
- allow_existing: bool,
+ on_existing: OnExisting,
exclude_newer: Option<ExcludeNewer>,
concurrency: Concurrency,
no_config: bool,
@@ -209,7 +210,7 @@ pub(crate) async fn venv(
interpreter,
prompt,
system_site_packages,
- allow_existing,
+ on_existing,
relocatable,
seed,
upgradeable,
diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs
index 433f5afd3..9c9b41065 100644
--- a/crates/uv/src/lib.rs
+++ b/crates/uv/src/lib.rs
@@ -1032,6 +1032,8 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
let python_request: Option<PythonRequest> =
args.settings.python.as_deref().map(PythonRequest::parse);
+ let on_existing = uv_virtualenv::OnExisting::from_args(args.allow_existing, args.clear);
+
commands::venv(
&project_dir,
args.path,
@@ -1048,7 +1050,7 @@ async fn run(mut cli: Cli) -> Result {
uv_virtualenv::Prompt::from_args(prompt),
args.system_site_packages,
args.seed,
- args.allow_existing,
+ on_existing,
args.settings.exclude_newer,
globals.concurrency,
cli.top_level.no_config,
diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs
index bf3bca4a4..1ebeecba8 100644
--- a/crates/uv/src/settings.rs
+++ b/crates/uv/src/settings.rs
@@ -2623,6 +2623,7 @@ impl BuildSettings {
pub(crate) struct VenvSettings {
pub(crate) seed: bool,
pub(crate) allow_existing: bool,
+ pub(crate) clear: bool,
pub(crate) path: Option<PathBuf>,
pub(crate) prompt: Option<String>,
pub(crate) system_site_packages: bool,
@@ -2641,6 +2642,7 @@ impl VenvSettings {
no_system,
seed,
allow_existing,
+ clear,
path,
prompt,
system_site_packages,
@@ -2658,6 +2660,7 @@ impl VenvSettings {
Self {
seed,
allow_existing,
+ clear,
path,
prompt,
system_site_packages,
diff --git a/crates/uv/tests/it/cache_prune.rs b/crates/uv/tests/it/cache_prune.rs
index a6ec48bd4..99493fe21 100644
--- a/crates/uv/tests/it/cache_prune.rs
+++ b/crates/uv/tests/it/cache_prune.rs
@@ -227,7 +227,7 @@ fn prune_unzipped() -> Result<()> {
Removed [N] files ([SIZE])
"###);
- context.venv().assert().success();
+ context.venv().arg("--clear").assert().success();
// Reinstalling the source distribution should not require re-downloading the source
// distribution.
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index bc6e65f4e..9d3c1428f 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -1415,6 +1415,7 @@ pub fn create_venv_from_executable<P: AsRef<Path>>(path: P, cache_dir: &ChildPath
assert_cmd::Command::new(get_bin())
.arg("venv")
.arg(path.as_ref().as_os_str())
+ .arg("--clear")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--python")
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index 123d9066b..9cd394bbd 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -2859,7 +2859,7 @@ fn install_no_binary_cache() {
);
// Re-create the virtual environment.
- context.venv().assert().success();
+ context.venv().arg("--clear").assert().success();
// Re-install. The distribution should be installed from the cache.
uv_snapshot!(
@@ -2877,7 +2877,7 @@ fn install_no_binary_cache() {
);
// Re-create the virtual environment.
- context.venv().assert().success();
+ context.venv().arg("--clear").assert().success();
// Install with `--no-binary`. The distribution should be built from source, despite a binary
// distribution being available in the cache.
@@ -3088,7 +3088,7 @@ fn cache_priority() {
);
// Re-create the virtual environment.
- context.venv().assert().success();
+ context.venv().arg("--clear").assert().success();
// Install `idna` without a version specifier.
uv_snapshot!(
@@ -8252,6 +8252,7 @@ fn install_relocatable() -> Result<()> {
context
.venv()
.arg(context.venv.as_os_str())
+ .arg("--clear")
.arg("--python")
.arg("3.12")
.arg("--relocatable")
diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs
index 537c5dff2..4b249be8c 100644
--- a/crates/uv/tests/it/pip_sync.rs
+++ b/crates/uv/tests/it/pip_sync.rs
@@ -5625,7 +5625,7 @@ fn sync_seed() -> Result<()> {
);
// Re-create the environment with seed packages.
- uv_snapshot!(context.filters(), context.venv()
+ uv_snapshot!(context.filters(), context.venv().arg("--clear")
.arg("--seed"), @r"
success: true
exit_code: 0
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index c225225b8..35a06ea57 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -9987,6 +9987,7 @@ fn sync_when_virtual_environment_incompatible_with_interpreter() -> Result<()> {
context
.venv()
.arg(context.venv.as_os_str())
+ .arg("--clear")
.arg("--python")
.arg("3.12")
.assert()
diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs
index 43cacb640..2430e607d 100644
--- a/crates/uv/tests/it/venv.rs
+++ b/crates/uv/tests/it/venv.rs
@@ -30,10 +30,28 @@ fn create_venv() {
context.venv.assert(predicates::path::is_dir());
- // Create a virtual environment at the same location, which should replace it.
uv_snapshot!(context.filters(), context.venv()
.arg(context.venv.as_os_str())
.arg("--python")
+ .arg("3.12"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
+ "
+ );
+
+ // Create a virtual environment at the same location using `--clear`,
+ // which should replace it.
+ uv_snapshot!(context.filters(), context.venv()
+ .arg(context.venv.as_os_str())
+ .arg("--clear")
+ .arg("--python")
.arg("3.12"), @r###"
success: true
exit_code: 0
@@ -162,7 +180,7 @@ fn create_venv_project_environment() -> Result<()> {
.assert(predicates::path::is_dir());
// Or, if they opt out with `--no-workspace` or `--no-project`
- uv_snapshot!(context.filters(), context.venv().arg("--no-workspace"), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--no-workspace"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -174,7 +192,7 @@ fn create_venv_project_environment() -> Result<()> {
"###
);
- uv_snapshot!(context.filters(), context.venv().arg("--no-project"), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--no-project"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -252,7 +270,7 @@ fn create_venv_reads_request_from_python_version_file() {
.write_str("3.12")
.unwrap();
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -291,7 +309,7 @@ fn create_venv_reads_request_from_python_versions_file() {
.write_str("3.12\n3.11")
.unwrap();
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -334,7 +352,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -357,7 +375,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -380,7 +398,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -414,7 +432,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -437,7 +455,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -460,7 +478,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r###"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -475,7 +493,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> {
context.venv.assert(predicates::path::is_dir());
// We warn if we receive an incompatible version
- uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -527,7 +545,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -560,7 +578,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -593,7 +611,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv(), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -621,7 +639,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -654,7 +672,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> {
"#
})?;
- uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r"
+ uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r"
success: false
exit_code: 2
----- stdout -----
@@ -945,15 +963,15 @@ fn non_empty_dir_exists() -> Result<()> {
.arg(context.venv.as_os_str())
.arg("--python")
.arg("3.12"), @r"
- success: false
- exit_code: 2
+ success: true
+ exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
- error: Failed to create virtual environment
- Caused by: The directory `.venv` exists, but it's not a virtual environment
+ warning: A directory already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
"
);
@@ -973,15 +991,15 @@ fn non_empty_dir_exists_allow_existing() -> Result<()> {
.arg(context.venv.as_os_str())
.arg("--python")
.arg("3.12"), @r"
- success: false
- exit_code: 2
+ success: true
+ exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
- error: Failed to create virtual environment
- Caused by: The directory `.venv` exists, but it's not a virtual environment
+ warning: A directory already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
"
);
@@ -1102,31 +1120,6 @@ fn windows_shims() -> Result<()> {
Ok(())
}
-#[test]
-fn virtualenv_compatibility() {
- let context = TestContext::new_with_versions(&["3.12"]);
-
- // Create a virtual environment at `.venv`, passing the redundant `--clear` flag.
- uv_snapshot!(context.filters(), context.venv()
- .arg(context.venv.as_os_str())
- .arg("--clear")
- .arg("--python")
- .arg("3.12"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- warning: virtualenv's `--clear` has no effect (uv always clears the virtual environment)
- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
- Creating virtual environment at: .venv
- Activate with: source .venv/[BIN]/activate
- "###
- );
-
- context.venv.assert(predicates::path::is_dir());
-}
-
#[test]
fn verify_pyvenv_cfg() {
let context = TestContext::new("3.12");
@@ -1154,6 +1147,7 @@ fn verify_pyvenv_cfg_relocatable() {
context
.venv()
.arg(context.venv.as_os_str())
+ .arg("--clear")
.arg("--python")
.arg("3.12")
.arg("--relocatable")
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 881c96697..9be647449 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -4683,7 +4683,7 @@ uv venv [OPTIONS] [PATH]
Options
--allow-existingPreserve any existing files or directories at the target path.
-By default, uv venv will remove an existing virtual environment at the given path, and exit with an error if the path is non-empty but not a virtual environment. The --allow-existing option will instead write to the given path, regardless of its contents, and without clearing it beforehand.
+By default, uv venv will exit with an error if the given path is non-empty. The --allow-existing option will instead write to the given path, regardless of its contents, and without clearing it beforehand.
WARNING: This option can lead to unexpected behavior if the existing virtual environment and the newly-created virtual environment are linked to different Python interpreters.
--allow-insecure-host , --trusted-host allow-insecure-host Allow insecure connections to a host.
Can be provided multiple times.
@@ -4692,7 +4692,9 @@ uv venv [OPTIONS] [PATH]
May also be set with the UV_INSECURE_HOST environment variable.
--cache-dir cache-dir Path to the cache directory.
Defaults to $XDG_CACHE_HOME/uv or $HOME/.cache/uv on macOS and Linux, and %LOCALAPPDATA%\uv\cache on Windows.
To view the location of the cache directory, run uv cache dir.
-May also be set with the UV_CACHE_DIR environment variable.
--color color-choice Control the use of color in output.
+May also be set with the UV_CACHE_DIR environment variable.
--clear , -cRemove any existing files or directories at the target path.
+By default, uv venv will exit with an error if the given path is non-empty. The --clear option will instead clear a non-empty path before creating a new virtual environment.
+May also be set with the UV_VENV_CLEAR environment variable.
--color color-choice Control the use of color in output.
By default, uv will automatically detect support for colors when writing to a terminal.
Possible values:
diff --git a/docs/reference/environment.md b/docs/reference/environment.md
index a64869edb..e848d4a41 100644
--- a/docs/reference/environment.md
+++ b/docs/reference/environment.md
@@ -458,6 +458,11 @@ Equivalent to the `--torch-backend` command-line argument (e.g., `cpu`, `cu126`,
Used in ephemeral environments like CI to install uv to a specific path while preventing
the installer from modifying shell profiles or environment variables.
+### `UV_VENV_CLEAR`
+
+Equivalent to the `--clear` command-line argument. If set, uv will remove any
+existing files or directories at the target path.
+
### `UV_VENV_SEED`
Install seed packages (one or more of: `pip`, `setuptools`, and `wheel`) into the virtual environment
From b98ac8c224f651a61ee3c44f6829d70cde80b3a9 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 16 Jul 2025 15:31:47 -0500
Subject: [PATCH 072/130] Validate that discovered interpreters meet the Python
preference (#7934)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Closes https://github.com/astral-sh/uv/issues/5144
e.g.
```
❯ cargo run -q -- sync --python-preference only-system
Using CPython 3.12.6 interpreter at: /opt/homebrew/opt/python@3.12/bin/python3.12
Removed virtual environment at: .venv
Creating virtual environment at: .venv
Resolved 9 packages in 14ms
Installed 8 packages in 9ms
+ anyio==4.6.0
+ certifi==2024.8.30
+ h11==0.14.0
+ httpcore==1.0.5
+ httpx==0.27.2
+ idna==3.10
+ ruff==0.6.7
+ sniffio==1.3.1
❯ cargo run -q -- sync --python-preference only-managed
Using CPython 3.12.1
Removed virtual environment at: .venv
Creating virtual environment at: .venv
Resolved 9 packages in 14ms
Installed 8 packages in 11ms
+ anyio==4.6.0
+ certifi==2024.8.30
+ h11==0.14.0
+ httpcore==1.0.5
+ httpx==0.27.2
+ idna==3.10
+ ruff==0.6.7
+ sniffio==1.3.1
```
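
A simplified sketch of the filter this patch adds (condensed from `satisfies_python_preference`
in the diff below): explicit sources such as an activated environment are always accepted, the
"prefer" preferences accept any interpreter, and only `only-managed`/`only-system` reject
mismatches. The names and signature here are condensed for illustration; the real function also
takes the `PythonSource` and emits debug logs.

```rust
/// Condensed from `satisfies_python_preference` in `crates/uv-python/src/discovery.rs` below.
#[allow(dead_code)]
#[derive(Copy, Clone)]
enum PythonPreference {
    OnlyManaged,
    Managed,
    System,
    OnlySystem,
}

fn satisfies(is_explicit_source: bool, is_managed: bool, preference: PythonPreference) -> bool {
    match preference {
        // "Prefer" variants accept any interpreter; only the "only" variants filter.
        PythonPreference::Managed | PythonPreference::System => true,
        PythonPreference::OnlyManaged => is_managed || is_explicit_source,
        PythonPreference::OnlySystem => !is_managed || is_explicit_source,
    }
}

fn main() {
    // A managed interpreter found on the search path is skipped under `only-system`...
    assert!(!satisfies(false, true, PythonPreference::OnlySystem));
    // ...but an explicitly activated environment is always allowed.
    assert!(satisfies(true, true, PythonPreference::OnlySystem));
    // `only-managed` likewise skips unmanaged interpreters unless explicitly selected.
    assert!(!satisfies(false, false, PythonPreference::OnlyManaged));
}
```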
---
crates/uv-python/src/discovery.rs | 113 ++++++++++++++++++++-
crates/uv-python/src/environment.rs | 3 +-
crates/uv-python/src/interpreter.rs | 23 ++++-
crates/uv-python/src/lib.rs | 2 +-
crates/uv-static/src/env_vars.rs | 8 ++
crates/uv/src/commands/project/mod.rs | 26 ++++-
crates/uv/tests/it/common/mod.rs | 21 ++++
crates/uv/tests/it/pip_install.rs | 55 ++++++++++
crates/uv/tests/it/python_find.rs | 51 ++++++++++
crates/uv/tests/it/run.rs | 46 +++++++++
crates/uv/tests/it/sync.rs | 141 ++++++++++++++++++++++++++
crates/uv/tests/it/venv.rs | 66 ++++++++++++
12 files changed, 544 insertions(+), 11 deletions(-)
diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs
index c067082dd..f10b480e2 100644
--- a/crates/uv-python/src/discovery.rs
+++ b/crates/uv-python/src/discovery.rs
@@ -446,7 +446,16 @@ fn python_executables_from_installed<'a>(
.flatten();
match preference {
- PythonPreference::OnlyManaged => Box::new(from_managed_installations),
+ PythonPreference::OnlyManaged => {
+ // TODO(zanieb): Ideally, we'd create "fake" managed installation directories for tests,
+ // but for now... we'll just include the test interpreters which are always on the
+ // search path.
+ if std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED).is_ok() {
+ Box::new(from_managed_installations.chain(from_search_path))
+ } else {
+ Box::new(from_managed_installations)
+ }
+ }
PythonPreference::Managed => Box::new(
from_managed_installations
.chain(from_search_path)
@@ -730,6 +739,9 @@ fn python_interpreters<'a>(
false
}
})
+ .filter_ok(move |(source, interpreter)| {
+ satisfies_python_preference(*source, interpreter, preference)
+ })
}
/// Lazily convert Python executables into interpreters.
@@ -857,6 +869,93 @@ fn source_satisfies_environment_preference(
}
}
+/// Returns true if a Python interpreter matches the [`PythonPreference`].
+pub fn satisfies_python_preference(
+ source: PythonSource,
+ interpreter: &Interpreter,
+ preference: PythonPreference,
+) -> bool {
+ // If the source is "explicit", we will not apply the Python preference, e.g., if the user has
+ // activated a virtual environment, we should always allow it. We may want to invalidate the
+ // environment in some cases, like in projects, but we can't distinguish between explicit
+ // requests for a different Python preference or a persistent preference in a configuration file
+ // which would result in overly aggressive invalidation.
+ let is_explicit = match source {
+ PythonSource::ProvidedPath
+ | PythonSource::ParentInterpreter
+ | PythonSource::ActiveEnvironment
+ | PythonSource::CondaPrefix => true,
+ PythonSource::Managed
+ | PythonSource::DiscoveredEnvironment
+ | PythonSource::SearchPath
+ | PythonSource::SearchPathFirst
+ | PythonSource::Registry
+ | PythonSource::MicrosoftStore
+ | PythonSource::BaseCondaPrefix => false,
+ };
+
+ match preference {
+ PythonPreference::OnlyManaged => {
+ // Perform a fast check using the source before querying the interpreter
+ if matches!(source, PythonSource::Managed) || interpreter.is_managed() {
+ true
+ } else {
+ if is_explicit {
+ debug!(
+ "Allowing unmanaged Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}",
+ interpreter.sys_executable().display()
+ );
+ true
+ } else {
+ debug!(
+ "Ignoring Python interpreter at `{}`: only managed interpreters allowed",
+ interpreter.sys_executable().display()
+ );
+ false
+ }
+ }
+ }
+ // If not "only" a kind, any interpreter is okay
+ PythonPreference::Managed | PythonPreference::System => true,
+ PythonPreference::OnlySystem => {
+ let is_system = match source {
+ // A managed interpreter is never a system interpreter
+ PythonSource::Managed => false,
+ // We can't be sure if this is a system interpreter without checking
+ PythonSource::ProvidedPath
+ | PythonSource::ParentInterpreter
+ | PythonSource::ActiveEnvironment
+ | PythonSource::CondaPrefix
+ | PythonSource::DiscoveredEnvironment
+ | PythonSource::SearchPath
+ | PythonSource::SearchPathFirst
+ | PythonSource::Registry
+ | PythonSource::BaseCondaPrefix => !interpreter.is_managed(),
+ // Managed interpreters should never be found in the store
+ PythonSource::MicrosoftStore => true,
+ };
+
+ if is_system {
+ true
+ } else {
+ if is_explicit {
+ debug!(
+ "Allowing managed Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}",
+ interpreter.sys_executable().display()
+ );
+ true
+ } else {
+ debug!(
+ "Ignoring Python interpreter at `{}`: only system interpreters allowed",
+ interpreter.sys_executable().display()
+ );
+ false
+ }
+ }
+ }
+ }
+}
+
/// Check if an encountered error is critical and should stop discovery.
///
/// Returns false when an error could be due to a faulty Python installation and we should continue searching for a working one.
@@ -2812,6 +2911,18 @@ impl PythonPreference {
}
}
}
+
+ /// Return the canonical name.
+ // TODO(zanieb): This should be a `Display` impl and we should have a different view for
+ // the sources
+ pub fn canonical_name(&self) -> &'static str {
+ match self {
+ Self::OnlyManaged => "only managed",
+ Self::Managed => "prefer managed",
+ Self::System => "prefer system",
+ Self::OnlySystem => "only system",
+ }
+ }
}
impl fmt::Display for PythonPreference {
diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs
index 07f3ddb54..10cec16ad 100644
--- a/crates/uv-python/src/environment.rs
+++ b/crates/uv-python/src/environment.rs
@@ -158,8 +158,7 @@ impl PythonEnvironment {
let installation = match find_python_installation(
request,
preference,
- // Ignore managed installations when looking for environments
- PythonPreference::OnlySystem,
+ PythonPreference::default(),
cache,
preview,
)? {
diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs
index fc5adb833..dd9dd1cb4 100644
--- a/crates/uv-python/src/interpreter.rs
+++ b/crates/uv-python/src/interpreter.rs
@@ -271,15 +271,28 @@ impl Interpreter {
///
/// Returns `false` if we cannot determine the path of the uv managed Python interpreters.
pub fn is_managed(&self) -> bool {
+ if let Ok(test_managed) =
+ std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED)
+ {
+ // During testing, we collect interpreters into an artificial search path and need to
+ // be able to mock whether an interpreter is managed or not.
+ return test_managed.split_ascii_whitespace().any(|item| {
+ let version = ::from_str(item).expect(
+ "`UV_INTERNAL__TEST_PYTHON_MANAGED` items should be valid Python versions",
+ );
+ if version.patch().is_some() {
+ version.version() == self.python_version()
+ } else {
+ (version.major(), version.minor()) == self.python_tuple()
+ }
+ });
+ }
+
let Ok(installations) = ManagedPythonInstallations::from_settings(None) else {
return false;
};
- installations
- .find_all()
- .into_iter()
- .flatten()
- .any(|install| install.path() == self.sys_base_prefix)
+ self.sys_base_prefix.starts_with(installations.root())
}
/// Returns `Some` if the environment is externally managed, optionally including an error
diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs
index ea6f0db61..2461f9006 100644
--- a/crates/uv-python/src/lib.rs
+++ b/crates/uv-python/src/lib.rs
@@ -8,7 +8,7 @@ use uv_static::EnvVars;
pub use crate::discovery::{
EnvironmentPreference, Error as DiscoveryError, PythonDownloads, PythonNotFound,
PythonPreference, PythonRequest, PythonSource, PythonVariant, VersionRequest,
- find_python_installations,
+ find_python_installations, satisfies_python_preference,
};
pub use crate::downloads::PlatformRequest;
pub use crate::environment::{InvalidEnvironmentKind, PythonEnvironment};
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index a99808468..f7fa6cb31 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -376,6 +376,14 @@ impl EnvVars {
#[attr_hidden]
pub const UV_INTERNAL__SHOW_DERIVATION_TREE: &'static str = "UV_INTERNAL__SHOW_DERIVATION_TREE";
+ /// Used to set a temporary directory for some tests.
+ #[attr_hidden]
+ pub const UV_INTERNAL__TEST_DIR: &'static str = "UV_INTERNAL__TEST_DIR";
+
+ /// Used to force treating an interpreter as "managed" during tests.
+ #[attr_hidden]
+ pub const UV_INTERNAL__TEST_PYTHON_MANAGED: &'static str = "UV_INTERNAL__TEST_PYTHON_MANAGED";
+
/// Path to system-level configuration directory on Unix systems.
pub const XDG_CONFIG_DIRS: &'static str = "XDG_CONFIG_DIRS";
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index 23655c1ca..cce02a70b 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -30,8 +30,8 @@ use uv_pep508::MarkerTreeContents;
use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts};
use uv_python::{
EnvironmentPreference, Interpreter, InvalidEnvironmentKind, PythonDownloads, PythonEnvironment,
- PythonInstallation, PythonPreference, PythonRequest, PythonVariant, PythonVersionFile,
- VersionFileDiscoveryOptions, VersionRequest,
+ PythonInstallation, PythonPreference, PythonRequest, PythonSource, PythonVariant,
+ PythonVersionFile, VersionFileDiscoveryOptions, VersionRequest, satisfies_python_preference,
};
use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements};
use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification};
@@ -664,6 +664,7 @@ impl ScriptInterpreter {
&venv,
EnvironmentKind::Script,
python_request.as_ref(),
+ python_preference,
requires_python
.as_ref()
.map(|(requires_python, _)| requires_python),
@@ -794,6 +795,9 @@ pub(crate) enum EnvironmentIncompatibilityError {
"The interpreter in the {0} environment has a different version ({1}) than it was created with ({2})"
)]
PyenvVersionConflict(EnvironmentKind, Version, Version),
+
+ #[error("The {0} environment's Python interpreter does not meet the Python preference: `{1}`")]
+ PythonPreference(EnvironmentKind, PythonPreference),
}
/// Whether an environment is usable for a project or script, i.e., if it matches the requirements.
@@ -801,6 +805,7 @@ fn environment_is_usable(
environment: &PythonEnvironment,
kind: EnvironmentKind,
python_request: Option<&PythonRequest>,
+ python_preference: PythonPreference,
requires_python: Option<&RequiresPython>,
cache: &Cache,
) -> Result<(), EnvironmentIncompatibilityError> {
@@ -836,6 +841,22 @@ fn environment_is_usable(
}
}
+ if satisfies_python_preference(
+ PythonSource::DiscoveredEnvironment,
+ environment.interpreter(),
+ python_preference,
+ ) {
+ trace!(
+ "The virtual environment's Python interpreter meets the Python preference: `{}`",
+ python_preference
+ );
+ } else {
+ return Err(EnvironmentIncompatibilityError::PythonPreference(
+ kind,
+ python_preference,
+ ));
+ }
+
Ok(())
}
@@ -889,6 +910,7 @@ impl ProjectInterpreter {
&venv,
EnvironmentKind::Project,
python_request.as_ref(),
+ python_preference,
requires_python.as_ref(),
cache,
) {
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index 9d3c1428f..08eeec3aa 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -187,6 +187,18 @@ impl TestContext {
"virtual environments, managed installations, search path, or registry".to_string(),
"[PYTHON SOURCES]".to_string(),
));
+ self.filters.push((
+ "virtual environments, search path, or registry".to_string(),
+ "[PYTHON SOURCES]".to_string(),
+ ));
+ self.filters.push((
+ "virtual environments, registry, or search path".to_string(),
+ "[PYTHON SOURCES]".to_string(),
+ ));
+ self.filters.push((
+ "virtual environments or search path".to_string(),
+ "[PYTHON SOURCES]".to_string(),
+ ));
self.filters.push((
"managed installations or search path".to_string(),
"[PYTHON SOURCES]".to_string(),
@@ -415,6 +427,15 @@ impl TestContext {
self
}
+ pub fn with_versions_as_managed(mut self, versions: &[&str]) -> Self {
+ self.extra_env.push((
+ EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED.into(),
+ versions.iter().join(" ").into(),
+ ));
+
+ self
+ }
+
/// Clear filters on `TestContext`.
pub fn clear_filters(mut self) -> Self {
self.filters.clear();
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index 9cd394bbd..2a7b0f404 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -11684,3 +11684,58 @@ fn strip_shebang_arguments() -> Result<()> {
Ok(())
}
+
+#[test]
+fn install_python_preference() {
+ let context =
+ TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]);
+
+ // Create a managed interpreter environment
+ uv_snapshot!(context.filters(), context.venv(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ Activate with: source .venv/[BIN]/activate
+ ");
+
+ // Install a package, requesting managed Python
+ uv_snapshot!(context.filters(), context.pip_install().arg("anyio").arg("--managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 3 packages in [TIME]
+ Prepared 3 packages in [TIME]
+ Installed 3 packages in [TIME]
+ + anyio==4.3.0
+ + idna==3.6
+ + sniffio==1.3.1
+ ");
+
+ // Install a package, requesting unmanaged Python
+ // This is allowed, because the virtual environment already exists
+ uv_snapshot!(context.filters(), context.pip_install().arg("anyio").arg("--no-managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Audited 1 package in [TIME]
+ ");
+
+ // This also works with `VIRTUAL_ENV` unset
+ uv_snapshot!(context.filters(), context.pip_install()
+ .arg("anyio").arg("--no-managed-python").env_remove("VIRTUAL_ENV"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Audited 1 package in [TIME]
+ ");
+}
diff --git a/crates/uv/tests/it/python_find.rs b/crates/uv/tests/it/python_find.rs
index 49e60c068..41eceeb92 100644
--- a/crates/uv/tests/it/python_find.rs
+++ b/crates/uv/tests/it/python_find.rs
@@ -728,6 +728,57 @@ fn python_find_venv_invalid() {
"###);
}
+#[test]
+fn python_find_managed() {
+ let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"])
+ .with_filtered_python_sources()
+ .with_versions_as_managed(&["3.12"]);
+
+ // We find the managed interpreter
+ uv_snapshot!(context.filters(), context.python_find().arg("--managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [PYTHON-3.12]
+
+ ----- stderr -----
+ ");
+
+ // Request an interpreter that cannot be satisfied
+ uv_snapshot!(context.filters(), context.python_find().arg("--managed-python").arg("3.11"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: No interpreter found for Python 3.11 in virtual environments or managed installations
+ ");
+
+ let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"])
+ .with_filtered_python_sources()
+ .with_versions_as_managed(&["3.11"]);
+
+ // We find the unmanaged interpreter
+ uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ [PYTHON-3.12]
+
+ ----- stderr -----
+ ");
+
+ // Request an interpreter that cannot be satisfied
+ uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python").arg("3.11"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: No interpreter found for Python 3.11 in [PYTHON SOURCES]
+ ");
+}
+
/// See:
///
/// This test will not succeed on macOS if using a Homebrew provided interpreter. The interpreter
diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs
index 6a1eb6093..ad8672788 100644
--- a/crates/uv/tests/it/run.rs
+++ b/crates/uv/tests/it/run.rs
@@ -5500,3 +5500,49 @@ fn run_no_sync_incompatible_python() -> Result<()> {
Ok(())
}
+
+#[test]
+fn run_python_preference_no_project() {
+ let context =
+ TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]);
+
+ context.venv().assert().success();
+
+ uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Python 3.12.[X]
+
+ ----- stderr -----
+ ");
+
+ uv_snapshot!(context.filters(), context.run().arg("--managed-python").arg("python").arg("--version"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Python 3.12.[X]
+
+ ----- stderr -----
+ ");
+
+ // `VIRTUAL_ENV` is set here, so we'll ignore the flag
+ uv_snapshot!(context.filters(), context.run().arg("--no-managed-python").arg("python").arg("--version"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Python 3.12.[X]
+
+ ----- stderr -----
+ ");
+
+ // If we remove the `VIRTUAL_ENV` variable, we should get the unmanaged Python
+ uv_snapshot!(context.filters(), context.run().arg("--no-managed-python").arg("python").arg("--version").env_remove("VIRTUAL_ENV"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Python 3.11.[X]
+
+ ----- stderr -----
+ ");
+}
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 35a06ea57..3544f1961 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -10804,3 +10804,144 @@ fn undeclared_editable() -> Result<()> {
Ok(())
}
+
+#[test]
+fn sync_python_preference() -> Result<()> {
+ let context = TestContext::new_with_versions(&["3.12", "3.11"]);
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.11"
+ dependencies = []
+ "#,
+ )?;
+
+ // Run an initial sync, with 3.12 as an "unmanaged" interpreter
+ context.sync().assert().success();
+
+ // Mark 3.12 as a managed interpreter for the rest of the tests
+ let context = context.with_versions_as_managed(&["3.12"]);
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // We should invalidate the environment and switch to 3.11
+ uv_snapshot!(context.filters(), context.sync().arg("--no-managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
+ Removed virtual environment at: .venv
+ Creating virtual environment at: .venv
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // We will use the environment if it exists
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // Unless the user requests a Python preference that is incompatible
+ uv_snapshot!(context.filters(), context.sync().arg("--managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Removed virtual environment at: .venv
+ Creating virtual environment at: .venv
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // If an interpreter cannot be found, we'll fail
+ uv_snapshot!(context.filters(), context.sync().arg("--managed-python").arg("-p").arg("3.11"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ error: No interpreter found for Python 3.11 in managed installations
+
+ hint: A managed Python download is available for Python 3.11, but Python downloads are set to 'never'
+ ");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.11"
+ dependencies = []
+
+ [tool.uv]
+ python-preference = "only-system"
+ "#,
+ )?;
+
+ // We'll respect a `python-preference` in the `pyproject.toml` file
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
+ Removed virtual environment at: .venv
+ Creating virtual environment at: .venv
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // But it can be overridden via the CLI
+ uv_snapshot!(context.filters(), context.sync().arg("--managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Removed virtual environment at: .venv
+ Creating virtual environment at: .venv
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ // `uv run` will invalidate the environment too
+ uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ Python 3.11.[X]
+
+ ----- stderr -----
+ Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
+ Removed virtual environment at: .venv
+ Creating virtual environment at: .venv
+ Resolved 1 package in [TIME]
+ Audited in [TIME]
+ ");
+
+ Ok(())
+}
diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs
index 2430e607d..120d7def2 100644
--- a/crates/uv/tests/it/venv.rs
+++ b/crates/uv/tests/it/venv.rs
@@ -1322,3 +1322,69 @@ fn create_venv_apostrophe() {
let stdout = String::from_utf8_lossy(&output.stdout);
assert_eq!(stdout.trim(), venv_dir.to_string_lossy());
}
+
+#[test]
+fn venv_python_preference() {
+ let context =
+ TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]);
+
+ // Create a managed interpreter environment
+ uv_snapshot!(context.filters(), context.venv(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ Activate with: source .venv/[BIN]/activate
+ ");
+
+ uv_snapshot!(context.filters(), context.venv().arg("--no-managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
+ Creating virtual environment at: .venv
+ warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
+ ");
+
+ uv_snapshot!(context.filters(), context.venv().arg("--no-managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
+ Creating virtual environment at: .venv
+ warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
+ ");
+
+ uv_snapshot!(context.filters(), context.venv(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
+ ");
+
+ uv_snapshot!(context.filters(), context.venv().arg("--managed-python"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
+ Creating virtual environment at: .venv
+ warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it
+ Activate with: source .venv/[BIN]/activate
+ ");
+}
From ff30f14d50cfdda544397f910808eeec8b20f11b Mon Sep 17 00:00:00 2001
From: John Mumm
Date: Wed, 16 Jul 2025 23:17:01 +0200
Subject: [PATCH 073/130] Build `path` sources without build systems by default
(#14413)
We currently treat path sources as virtual if they do not specify a
build system, which is surprising behavior. This PR updates the behavior
to treat path sources as packages unless the path source is explicitly
marked as `package = false` or its own `tool.uv.package` is set to
`false`.
Closes #12015
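As a concrete sketch of the new default (the `project`/`core` names follow the sync tests added in this patch):

```toml
# pyproject.toml (sketch)
[project]
name = "project"
version = "0.0.0"
requires-python = ">=3.12"
dependencies = ["core"]

[tool.uv.sources]
# Built and installed by default, even if `./core` declares no `[build-system]` table.
core = { path = "./core" }
# To keep it virtual (only its dependencies installed), opt out explicitly:
# core = { path = "./core", package = false }
```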
---------
Co-authored-by: Zanie Blue
---
.../uv-distribution/src/metadata/lowering.rs | 4 +-
crates/uv-workspace/src/pyproject.rs | 15 ++--
crates/uv/tests/it/edit.rs | 8 +-
crates/uv/tests/it/lock.rs | 33 +++----
crates/uv/tests/it/sync.rs | 85 +++++++++++++++++++
docs/concepts/projects/config.md | 5 +-
docs/concepts/projects/dependencies.md | 70 ++++++++++-----
7 files changed, 172 insertions(+), 48 deletions(-)
diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs
index 54782c083..c05ac4779 100644
--- a/crates/uv-distribution/src/metadata/lowering.rs
+++ b/crates/uv-distribution/src/metadata/lowering.rs
@@ -729,12 +729,14 @@ fn path_source(
})
} else {
// Determine whether the project is a package or virtual.
+ // If the `package` option is unset, check if `tool.uv.package` is set
+ // on the path source (otherwise, default to `true`).
let is_package = package.unwrap_or_else(|| {
let pyproject_path = install_path.join("pyproject.toml");
fs_err::read_to_string(&pyproject_path)
.ok()
.and_then(|contents| PyProjectToml::from_string(contents).ok())
- .map(|pyproject_toml| pyproject_toml.is_package())
+ .and_then(|pyproject_toml| pyproject_toml.tool_uv_package())
.unwrap_or(true)
});
diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs
index 124a62881..aa64c601e 100644
--- a/crates/uv-workspace/src/pyproject.rs
+++ b/crates/uv-workspace/src/pyproject.rs
@@ -83,12 +83,7 @@ impl PyProjectToml {
/// non-package ("virtual") project.
pub fn is_package(&self) -> bool {
// If `tool.uv.package` is set, defer to that explicit setting.
- if let Some(is_package) = self
- .tool
- .as_ref()
- .and_then(|tool| tool.uv.as_ref())
- .and_then(|uv| uv.package)
- {
+ if let Some(is_package) = self.tool_uv_package() {
return is_package;
}
@@ -96,6 +91,14 @@ impl PyProjectToml {
self.build_system.is_some()
}
+ /// Returns the value of `tool.uv.package` if set.
+ pub fn tool_uv_package(&self) -> Option<bool> {
+ self.tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.package)
+ }
+
/// Returns `true` if the project uses a dynamic version.
pub fn is_dynamic(&self) -> bool {
self.project
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index ccc0cabf2..70b8d6e50 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -13381,7 +13381,9 @@ fn add_path_with_no_workspace() -> Result<()> {
----- stderr -----
Resolved 2 packages in [TIME]
- Audited in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dep==0.1.0 (from file://[TEMP_DIR]/dep)
");
let pyproject_toml = context.read("pyproject.toml");
@@ -13452,7 +13454,9 @@ fn add_path_outside_workspace_no_default() -> Result<()> {
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 2 packages in [TIME]
- Audited in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dep==0.1.0 (from file://[TEMP_DIR]/external_dep)
");
let pyproject_toml = fs_err::read_to_string(workspace_toml)?;
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index faf37a83a..75d81b4c0 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -7205,12 +7205,12 @@ fn lock_exclusion() -> Result<()> {
]
[package.metadata]
- requires-dist = [{ name = "project", virtual = "../" }]
+ requires-dist = [{ name = "project", directory = "../" }]
[[package]]
name = "project"
version = "0.1.0"
- source = { virtual = "../" }
+ source = { directory = "../" }
"#
);
});
@@ -7793,7 +7793,7 @@ fn lock_dev_transitive() -> Result<()> {
[package.metadata]
requires-dist = [
{ name = "baz", editable = "baz" },
- { name = "foo", virtual = "../foo" },
+ { name = "foo", directory = "../foo" },
{ name = "iniconfig", specifier = ">1" },
]
@@ -7815,7 +7815,7 @@ fn lock_dev_transitive() -> Result<()> {
[[package]]
name = "foo"
version = "0.1.0"
- source = { virtual = "../foo" }
+ source = { directory = "../foo" }
[package.metadata]
@@ -13651,7 +13651,7 @@ fn lock_narrowed_python_version_upper() -> Result<()> {
[[package]]
name = "dependency"
version = "0.1.0"
- source = { virtual = "dependency" }
+ source = { directory = "dependency" }
dependencies = [
{ name = "iniconfig", marker = "python_full_version >= '3.10'" },
]
@@ -13677,7 +13677,7 @@ fn lock_narrowed_python_version_upper() -> Result<()> {
]
[package.metadata]
- requires-dist = [{ name = "dependency", marker = "python_full_version >= '3.10'", virtual = "dependency" }]
+ requires-dist = [{ name = "dependency", marker = "python_full_version >= '3.10'", directory = "dependency" }]
"#
);
});
@@ -17173,10 +17173,10 @@ fn lock_implicit_virtual_project() -> Result<()> {
Ok(())
}
-/// Lock a project that has a path dependency that is implicitly virtual (by way of omitting
-/// `build-system`).
+/// Lock a project that has a path dependency that is implicitly non-virtual (despite
+/// omitting `build-system`).
#[test]
-fn lock_implicit_virtual_path() -> Result<()> {
+fn lock_implicit_package_path() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
@@ -17243,7 +17243,7 @@ fn lock_implicit_virtual_path() -> Result<()> {
[[package]]
name = "child"
version = "0.1.0"
- source = { virtual = "child" }
+ source = { directory = "child" }
dependencies = [
{ name = "iniconfig" },
]
@@ -17281,7 +17281,7 @@ fn lock_implicit_virtual_path() -> Result<()> {
[package.metadata]
requires-dist = [
{ name = "anyio", specifier = ">3" },
- { name = "child", virtual = "child" },
+ { name = "child", directory = "child" },
]
[[package]]
@@ -17317,20 +17317,21 @@ fn lock_implicit_virtual_path() -> Result<()> {
Resolved 6 packages in [TIME]
"###);
- // Install from the lockfile. The virtual project should _not_ be installed.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ // Install from the lockfile. The path dependency should be installed.
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Prepared 4 packages in [TIME]
- Installed 4 packages in [TIME]
+ Prepared 5 packages in [TIME]
+ Installed 5 packages in [TIME]
+ anyio==4.3.0
+ + child==0.1.0 (from file://[TEMP_DIR]/child)
+ idna==3.6
+ iniconfig==2.0.0
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index 3544f1961..bb3546e22 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -5939,6 +5939,91 @@ fn sync_override_package() -> Result<()> {
~ project==0.0.0 (from file://[TEMP_DIR]/)
");
+ // Update the source `tool.uv` to `package = true`
+ let pyproject_toml = context.temp_dir.child("core").child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "core"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.uv]
+ package = true
+ "#,
+ )?;
+
+ // Mark the source as `package = false`.
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.0.0"
+ requires-python = ">=3.12"
+ dependencies = ["core"]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.uv.sources]
+ core = { path = "./core", package = false }
+ "#,
+ )?;
+
+ // Syncing the project should _not_ install `core`.
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ Prepared 1 package in [TIME]
+ Uninstalled 1 package in [TIME]
+ Installed 1 package in [TIME]
+ ~ project==0.0.0 (from file://[TEMP_DIR]/)
+ ");
+
+ // Remove the `package = false` mark.
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.0.0"
+ requires-python = ">=3.12"
+ dependencies = ["core"]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.uv.sources]
+ core = { path = "./core" }
+ "#,
+ )?;
+
+ // Syncing the project _should_ install `core`.
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 2 packages in [TIME]
+ Prepared 2 packages in [TIME]
+ Uninstalled 1 package in [TIME]
+ Installed 2 packages in [TIME]
+ + core==0.1.0 (from file://[TEMP_DIR]/core)
+ ~ project==0.0.0 (from file://[TEMP_DIR]/)
+ ");
+
Ok(())
}
diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md
index 8efb667a1..34b62c01a 100644
--- a/docs/concepts/projects/config.md
+++ b/docs/concepts/projects/config.md
@@ -116,8 +116,9 @@ with the default build system.
the presence of a `[build-system]` table is not required in other packages. For legacy reasons,
if a build system is not defined, then `setuptools.build_meta:__legacy__` is used to build the
package. Packages you depend on may not explicitly declare their build system but are still
- installable. Similarly, if you add a dependency on a local package or install it with `uv pip`,
- uv will always attempt to build and install it.
+ installable. Similarly, if you [add a dependency on a local project](./dependencies.md#path)
+ or install it with `uv pip`, uv will attempt to build and install it regardless of the presence
+ of a `[build-system]` table.
### Build system options
diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md
index 022db4d7e..bf11e7174 100644
--- a/docs/concepts/projects/dependencies.md
+++ b/docs/concepts/projects/dependencies.md
@@ -410,33 +410,28 @@ $ uv add ~/projects/bar/
!!! important
- An [editable installation](#editable-dependencies) is not used for path dependencies by
- default. An editable installation may be requested for project directories:
+ When using a directory as a path dependency, uv will attempt to build and install the target as
+ a package by default. See the [virtual dependency](#virtual-dependencies) documentation for
+ details.
- ```console
- $ uv add --editable ../projects/bar/
- ```
+An [editable installation](#editable-dependencies) is not used for path dependencies by default. An
+editable installation may be requested for project directories:
- Which will result in a `pyproject.toml` with:
+```console
+$ uv add --editable ../projects/bar/
+```
- ```toml title="pyproject.toml"
- [project]
- dependencies = ["bar"]
+Which will result in a `pyproject.toml` with:
- [tool.uv.sources]
- bar = { path = "../projects/bar", editable = true }
- ```
+```toml title="pyproject.toml"
+[project]
+dependencies = ["bar"]
- Similarly, if a project is marked as a [non-package](./config.md#build-systems), but you'd
- like to install it in the environment as a package, set `package = true` on the source:
+[tool.uv.sources]
+bar = { path = "../projects/bar", editable = true }
+```
- ```toml title="pyproject.toml"
- [project]
- dependencies = ["bar"]
-
- [tool.uv.sources]
- bar = { path = "../projects/bar", package = true }
- ```
+!!! tip
For multiple packages in the same repository, [_workspaces_](./workspaces.md) may be a better
fit.
@@ -808,6 +803,39 @@ Or, to opt-out of using an editable dependency in a workspace:
$ uv add --no-editable ./path/foo
```
+## Virtual dependencies
+
+uv allows dependencies to be "virtual", in which the dependency itself is not installed as a
+[package](./config.md#project-packaging), but its dependencies are.
+
+By default, only workspace members without build systems declared are virtual.
+
+A dependency with a [`path` source](#path) is not virtual unless it explicitly sets
+[`tool.uv.package = false`](../../reference/settings.md#package). Unlike working _in_ the dependent
+project with uv, the package will be built even if a [build system](./config.md#build-systems) is
+not declared.
+
+To treat a dependency as virtual, set `package = false` on the source:
+
+```toml title="pyproject.toml"
+[project]
+dependencies = ["bar"]
+
+[tool.uv.sources]
+bar = { path = "../projects/bar", package = false }
+```
+
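+The same opt-out can live in the dependency's own `pyproject.toml` instead of the source entry. A
+minimal sketch (the dependency's location at `../projects/bar` is illustrative):
+
+```toml title="pyproject.toml"
+# ../projects/bar/pyproject.toml
+[project]
+name = "bar"
+version = "0.1.0"
+
+[tool.uv]
+package = false
+```
+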
+Similarly, if a dependency sets `tool.uv.package = false`, it can be overridden by declaring
+`package = true` on the source:
+
+```toml title="pyproject.toml"
+[project]
+dependencies = ["bar"]
+
+[tool.uv.sources]
+bar = { path = "../projects/bar", package = true }
+```
+
## Dependency specifiers
uv uses standard
From 0077f2357f4e016c871b2b651ca59a139a95f19a Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 11:09:13 -0500
Subject: [PATCH 074/130] Stabilize addition of Python executables to the bin
(#14626)
Closes https://github.com/astral-sh/uv/issues/14296
As mentioned in #14681, this does not stabilize the `--default`
behavior.
---
crates/uv-cli/src/lib.rs | 7 +-
crates/uv/src/commands/python/install.rs | 62 ++-
crates/uv/tests/it/common/mod.rs | 25 +-
crates/uv/tests/it/help.rs | 7 +-
crates/uv/tests/it/python_install.rs | 555 +++++++++++++++++++++--
docs/concepts/python-versions.md | 25 +-
docs/guides/install-python.md | 23 +-
docs/reference/cli.md | 2 +-
8 files changed, 593 insertions(+), 113 deletions(-)
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 5df818654..9d7cfa6e0 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -4810,10 +4810,9 @@ pub enum PythonCommand {
/// Python versions are installed into the uv Python directory, which can be retrieved with `uv
/// python dir`.
///
- /// A `python` executable is not made globally available, managed Python versions are only used
- /// in uv commands or in active virtual environments. There is experimental support for adding
- /// Python executables to a directory on the path — use the `--preview` flag to enable this
- /// behavior and `uv python dir --bin` to retrieve the target directory.
+ /// By default, Python executables are added to a directory on the path with a minor version
+ /// suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use
+ /// `uv python dir --bin` to see the target directory.
///
/// Multiple Python versions may be requested.
///
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index b9d4660df..37d6a6777 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -166,12 +166,14 @@ pub(crate) async fn install(
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
+ // TODO(zanieb): We should consider marking the Python installation as the default when
+ // `--default` is used. It's not clear how this overlaps with a global Python pin, but I'd be
+ // surprised if `uv python find` returned the "newest" Python version rather than the one I just
+ // installed with the `--default` flag.
if default && !preview.is_enabled() {
- writeln!(
- printer.stderr(),
- "The `--default` flag is only available in preview mode; add the `--preview` flag to use `--default`"
- )?;
- return Ok(ExitStatus::Failure);
+ warn_user!(
+ "The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning"
+ );
}
if upgrade && preview.is_disabled() {
@@ -222,6 +224,8 @@ pub(crate) async fn install(
.map(PythonVersionFile::into_versions)
.unwrap_or_else(|| {
// If no version file is found and no requests were made
+ // TODO(zanieb): We should consider differentiating between a global Python version
+ // file here, allowing a request from there to enable `is_default_install`.
is_default_install = true;
vec![if reinstall {
// On bare `--reinstall`, reinstall all Python versions
@@ -451,10 +455,10 @@ pub(crate) async fn install(
}
}
- let bin_dir = if matches!(bin, Some(true)) || preview.is_enabled() {
- Some(python_executable_dir()?)
- } else {
+ let bin_dir = if matches!(bin, Some(false)) {
None
+ } else {
+ Some(python_executable_dir()?)
};
let installations: Vec<_> = downloaded.iter().chain(satisfied.iter().copied()).collect();
@@ -469,20 +473,10 @@ pub(crate) async fn install(
e.warn_user(installation);
}
- if preview.is_disabled() {
- debug!("Skipping installation of Python executables, use `--preview` to enable.");
- continue;
- }
-
- let bin_dir = bin_dir
- .as_ref()
- .expect("We should have a bin directory with preview enabled")
- .as_path();
-
let upgradeable = (default || is_default_install)
|| requested_minor_versions.contains(&installation.key().version().python_version());
- if !matches!(bin, Some(false)) {
+ if let Some(bin_dir) = bin_dir.as_ref() {
create_bin_links(
installation,
bin_dir,
@@ -661,11 +655,7 @@ pub(crate) async fn install(
}
}
- if preview.is_enabled() && !matches!(bin, Some(false)) {
- let bin_dir = bin_dir
- .as_ref()
- .expect("We should have a bin directory with preview enabled")
- .as_path();
+ if let Some(bin_dir) = bin_dir.as_ref() {
warn_if_not_on_path(bin_dir);
}
}
@@ -749,16 +739,20 @@ fn create_bin_links(
errors: &mut Vec<(InstallErrorKind, PythonInstallationKey, Error)>,
preview: PreviewMode,
) {
- let targets =
- if (default || is_default_install) && first_request.matches_installation(installation) {
- vec![
- installation.key().executable_name_minor(),
- installation.key().executable_name_major(),
- installation.key().executable_name(),
- ]
- } else {
- vec![installation.key().executable_name_minor()]
- };
+ // TODO(zanieb): We want more feedback on the `is_default_install` behavior before stabilizing
+ // it. In particular, it may be confusing because it does not apply when versions are loaded
+ // from a `.python-version` file.
+ let targets = if (default || (is_default_install && preview.is_enabled()))
+ && first_request.matches_installation(installation)
+ {
+ vec![
+ installation.key().executable_name_minor(),
+ installation.key().executable_name_major(),
+ installation.key().executable_name(),
+ ]
+ } else {
+ vec![installation.key().executable_name_minor()]
+ };
for target in targets {
let target = bin.join(target);
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index 08eeec3aa..ab4c38247 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -220,17 +220,30 @@ impl TestContext {
/// and `.exe` suffixes.
#[must_use]
pub fn with_filtered_python_names(mut self) -> Self {
+ use env::consts::EXE_SUFFIX;
+ let exe_suffix = regex::escape(EXE_SUFFIX);
+
+ self.filters.push((
+ format!(r"python\d.\d\d{exe_suffix}"),
+ "[PYTHON]".to_string(),
+ ));
+ self.filters
+ .push((format!(r"python\d{exe_suffix}"), "[PYTHON]".to_string()));
+
if cfg!(windows) {
+ // On Windows, we want to filter out all `python.exe` instances
self.filters
- .push((r"python\.exe".to_string(), "[PYTHON]".to_string()));
+ .push((format!(r"python{exe_suffix}"), "[PYTHON]".to_string()));
+ // Including ones where we'd already stripped the `.exe` in another filter
+ self.filters
+ .push((r"[\\/]python".to_string(), "/[PYTHON]".to_string()));
} else {
+ // On Unix, it's a little trickier — we don't want to clobber use of `python` in the
+ // middle of something else, e.g., `cpython`. For this reason, we require a leading `/`.
self.filters
- .push((r"python\d.\d\d".to_string(), "[PYTHON]".to_string()));
- self.filters
- .push((r"python\d".to_string(), "[PYTHON]".to_string()));
- self.filters
- .push((r"/python".to_string(), "/[PYTHON]".to_string()));
+ .push((format!(r"/python{exe_suffix}"), "/[PYTHON]".to_string()));
}
+
self
}
diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs
index d9353f7c3..d4f46b0cb 100644
--- a/crates/uv/tests/it/help.rs
+++ b/crates/uv/tests/it/help.rs
@@ -469,10 +469,9 @@ fn help_subsubcommand() {
Python versions are installed into the uv Python directory, which can be retrieved with `uv python
dir`.
- A `python` executable is not made globally available, managed Python versions are only used in uv
- commands or in active virtual environments. There is experimental support for adding Python
- executables to a directory on the path — use the `--preview` flag to enable this behavior and `uv
- python dir --bin` to retrieve the target directory.
+ By default, Python executables are added to a directory on the path with a minor version suffix,
+ e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use `uv python dir
+ --bin` to see the target directory.
Multiple Python versions may be requested.
diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs
index 50b0b3cf5..51e394aad 100644
--- a/crates/uv/tests/it/python_install.rs
+++ b/crates/uv/tests/it/python_install.rs
@@ -30,15 +30,49 @@ fn python_install() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
let bin_python = context
.bin_dir
.child(format!("python3.13{}", std::env::consts::EXE_SUFFIX));
- // The executable should not be installed in the bin directory (requires preview)
- bin_python.assert(predicate::path::missing());
+ // The executable should be installed in the bin directory
+ bin_python.assert(predicate::path::exists());
+
+ // On Unix, it should be a link
+ #[cfg(unix)]
+ bin_python.assert(predicate::path::is_symlink());
+
+ // The link should be a path to the binary
+ if cfg!(unix) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13"
+ );
+ });
+ } else if cfg!(windows) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python"
+ );
+ });
+ }
+
+ // The executable should "work"
+ uv_snapshot!(context.filters(), Command::new(bin_python.as_os_str())
+ .arg("-c").arg("import subprocess; print('hello world')"), @r###"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+ hello world
+
+ ----- stderr -----
+ "###);
// Should be a no-op when already installed
uv_snapshot!(context.filters(), context.python_install(), @r###"
@@ -67,9 +101,12 @@ fn python_install() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- ~ cpython-3.13.5-[PLATFORM]
+ ~ cpython-3.13.5-[PLATFORM] (python3.13)
");
+ // The executable should still be present in the bin directory
+ bin_python.assert(predicate::path::exists());
+
// Uninstallation requires an argument
uv_snapshot!(context.filters(), context.python_uninstall(), @r###"
success: false
@@ -93,8 +130,11 @@ fn python_install() {
----- stderr -----
Searching for Python versions matching: Python 3.13
Uninstalled Python 3.13.5 in [TIME]
- - cpython-3.13.5-[PLATFORM]
+ - cpython-3.13.5-[PLATFORM] (python3.13)
");
+
+ // The executable should be removed
+ bin_python.assert(predicate::path::missing());
}
#[test]
@@ -112,8 +152,8 @@ fn python_reinstall() {
----- stderr -----
Installed 2 versions in [TIME]
- + cpython-3.12.11-[PLATFORM]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.12.11-[PLATFORM] (python3.12)
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
// Reinstall a single version
@@ -124,7 +164,7 @@ fn python_reinstall() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- ~ cpython-3.13.5-[PLATFORM]
+ ~ cpython-3.13.5-[PLATFORM] (python3.13)
");
// Reinstall multiple versions
@@ -135,8 +175,8 @@ fn python_reinstall() {
----- stderr -----
Installed 2 versions in [TIME]
- ~ cpython-3.12.11-[PLATFORM]
- ~ cpython-3.13.5-[PLATFORM]
+ ~ cpython-3.12.11-[PLATFORM] (python3.12)
+ ~ cpython-3.13.5-[PLATFORM] (python3.13)
");
// Reinstalling a version that is not installed should also work
@@ -147,7 +187,7 @@ fn python_reinstall() {
----- stderr -----
Installed Python 3.11.13 in [TIME]
- + cpython-3.11.13-[PLATFORM]
+ + cpython-3.11.13-[PLATFORM] (python3.11)
");
}
@@ -167,7 +207,7 @@ fn python_reinstall_patch() {
----- stderr -----
Installed 2 versions in [TIME]
+ cpython-3.12.6-[PLATFORM]
- + cpython-3.12.7-[PLATFORM]
+ + cpython-3.12.7-[PLATFORM] (python3.12)
");
// Reinstall all "3.12" versions
@@ -180,7 +220,7 @@ fn python_reinstall_patch() {
----- stderr -----
Installed Python 3.12.11 in [TIME]
- + cpython-3.12.11-[PLATFORM]
+ + cpython-3.12.11-[PLATFORM] (python3.12)
");
}
@@ -328,6 +368,208 @@ fn regression_cpython() {
"###);
}
+#[test]
+fn python_install_force() {
+ let context: TestContext = TestContext::new_with_versions(&[])
+ .with_filtered_python_keys()
+ .with_filtered_exe_suffix()
+ .with_managed_python_dirs();
+
+ // Install the latest version
+ uv_snapshot!(context.filters(), context.python_install(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
+ ");
+
+ let bin_python = context
+ .bin_dir
+ .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX));
+
+ // You can force replacement of the executables
+ uv_snapshot!(context.filters(), context.python_install().arg("--force"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
+ ");
+
+ // The executable should still be present in the bin directory
+ bin_python.assert(predicate::path::exists());
+
+ // If an unmanaged executable is present, `--force` is required
+ fs_err::remove_file(bin_python.path()).unwrap();
+ bin_python.touch().unwrap();
+
+ uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: Failed to install executable for cpython-3.13.5-[PLATFORM]
+ Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it
+ ");
+
+ uv_snapshot!(context.filters(), context.python_install().arg("--force").arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
+ ");
+
+ bin_python.assert(predicate::path::exists());
+}
+
+#[test]
+fn python_install_minor() {
+ let context: TestContext = TestContext::new_with_versions(&[])
+ .with_filtered_python_keys()
+ .with_filtered_exe_suffix()
+ .with_managed_python_dirs();
+
+ // Install a minor version
+ uv_snapshot!(context.filters(), context.python_install().arg("3.11"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed Python 3.11.13 in [TIME]
+ + cpython-3.11.13-[PLATFORM] (python3.11)
+ ");
+
+ let bin_python = context
+ .bin_dir
+ .child(format!("python3.11{}", std::env::consts::EXE_SUFFIX));
+
+ // The executable should be installed in the bin directory
+ bin_python.assert(predicate::path::exists());
+
+ // It should be a link to the minor version
+ if cfg!(unix) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.13-[PLATFORM]/bin/python3.11"
+ );
+ });
+ } else if cfg!(windows) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.13-[PLATFORM]/python"
+ );
+ });
+ }
+
+ uv_snapshot!(context.filters(), context.python_uninstall().arg("3.11"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Searching for Python versions matching: Python 3.11
+ Uninstalled Python 3.11.13 in [TIME]
+ - cpython-3.11.13-[PLATFORM] (python3.11)
+ ");
+
+ // The executable should be removed
+ bin_python.assert(predicate::path::missing());
+}
+
+#[test]
+fn python_install_multiple_patch() {
+ let context: TestContext = TestContext::new_with_versions(&[])
+ .with_filtered_python_keys()
+ .with_filtered_exe_suffix()
+ .with_managed_python_dirs();
+
+ // Install multiple patch versions
+ uv_snapshot!(context.filters(), context.python_install().arg("3.12.8").arg("3.12.6"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Installed 2 versions in [TIME]
+ + cpython-3.12.6-[PLATFORM]
+ + cpython-3.12.8-[PLATFORM] (python3.12)
+ ");
+
+ let bin_python = context
+ .bin_dir
+ .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
+
+ // The executable should be installed in the bin directory
+ bin_python.assert(predicate::path::exists());
+
+ // The link should resolve to the newer patch version
+ if cfg!(unix) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12"
+ );
+ });
+ } else if cfg!(windows) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python"
+ );
+ });
+ }
+
+ uv_snapshot!(context.filters(), context.python_uninstall().arg("3.12.8"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Searching for Python versions matching: Python 3.12.8
+ Uninstalled Python 3.12.8 in [TIME]
+ - cpython-3.12.8-[PLATFORM] (python3.12)
+ ");
+
+ // TODO(zanieb): This behavior is not implemented yet
+ // // The executable should be installed in the bin directory
+ // bin_python.assert(predicate::path::exists());
+
+ // // When the version is removed, the link should point to the other patch version
+ // if cfg!(unix) {
+ // insta::with_settings!({
+ // filters => context.filters(),
+ // }, {
+ // insta::assert_snapshot!(
+ // canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12"
+ // );
+ // });
+ // } else if cfg!(windows) {
+ // insta::with_settings!({
+ // filters => context.filters(),
+ // }, {
+ // insta::assert_snapshot!(
+ // canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python"
+ // );
+ // });
+ // }
+}
+
#[test]
fn python_install_preview() {
let context: TestContext = TestContext::new_with_versions(&[])
@@ -853,7 +1095,7 @@ fn python_install_freethreaded() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
// Should not work with older Python versions
@@ -875,7 +1117,7 @@ fn python_install_freethreaded() {
Searching for Python installations
Uninstalled 2 versions in [TIME]
- cpython-3.13.5+freethreaded-[PLATFORM] (python3.13t)
- - cpython-3.13.5-[PLATFORM]
+ - cpython-3.13.5-[PLATFORM] (python3.13)
");
}
@@ -936,15 +1178,243 @@ fn python_install_default() {
.bin_dir
.child(format!("python{}", std::env::consts::EXE_SUFFIX));
- // `--preview` is required for `--default`
- uv_snapshot!(context.filters(), context.python_install().arg("--default"), @r###"
- success: false
- exit_code: 1
+ // Install a specific version
+ uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r"
+ success: true
+ exit_code: 0
----- stdout -----
----- stderr -----
- The `--default` flag is only available in preview mode; add the `--preview` flag to use `--default`
- "###);
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
+ ");
+
+ // Only the minor versioned executable should be installed
+ bin_python_minor_13.assert(predicate::path::exists());
+ bin_python_major.assert(predicate::path::missing());
+ bin_python_default.assert(predicate::path::missing());
+
+ // Install again, with `--default`
+ uv_snapshot!(context.filters(), context.python_install().arg("--default").arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python, python3)
+ ");
+
+ // Now all the executables should be installed
+ bin_python_minor_13.assert(predicate::path::exists());
+ bin_python_major.assert(predicate::path::exists());
+ bin_python_default.assert(predicate::path::exists());
+
+ // Uninstall
+ uv_snapshot!(context.filters(), context.python_uninstall().arg("--all"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Searching for Python installations
+ Uninstalled Python 3.13.5 in [TIME]
+ - cpython-3.13.5-[PLATFORM] (python, python3, python3.13)
+ ");
+
+ // The executables should be removed
+ bin_python_minor_13.assert(predicate::path::missing());
+ bin_python_major.assert(predicate::path::missing());
+ bin_python_default.assert(predicate::path::missing());
+
+ // Install the latest version, i.e., a "default install"
+ uv_snapshot!(context.filters(), context.python_install().arg("--default"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning
+ Installed Python 3.13.5 in [TIME]
+ + cpython-3.13.5-[PLATFORM] (python, python3, python3.13)
+ ");
+
+ // Since it's a default install, we should include all of the executables
+ bin_python_minor_13.assert(predicate::path::exists());
+ bin_python_major.assert(predicate::path::exists());
+ bin_python_default.assert(predicate::path::exists());
+
+ // And 3.13 should be the default
+ if cfg!(unix) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13"
+ );
+ });
+ } else if cfg!(windows) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python"
+ );
+ });
+ }
+
+ // Uninstall again
+ uv_snapshot!(context.filters(), context.python_uninstall().arg("3.13"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Searching for Python versions matching: Python 3.13
+ Uninstalled Python 3.13.5 in [TIME]
+ - cpython-3.13.5-[PLATFORM] (python, python3, python3.13)
+ ");
+
+ // We should remove all the executables
+ bin_python_minor_13.assert(predicate::path::missing());
+ bin_python_major.assert(predicate::path::missing());
+ bin_python_default.assert(predicate::path::missing());
+
+ // Install multiple versions, with the `--default` flag
+ uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("3.13").arg("--default"), @r"
+ success: false
+ exit_code: 2
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning
+ error: The `--default` flag cannot be used with multiple targets
+ ");
+
+ // Install 3.12 as a new default
+ uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("--default"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning
+ Installed Python 3.12.11 in [TIME]
+ + cpython-3.12.11-[PLATFORM] (python, python3, python3.12)
+ ");
+
+ let bin_python_minor_12 = context
+ .bin_dir
+ .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
+
+ // All the executables should exist
+ bin_python_minor_12.assert(predicate::path::exists());
+ bin_python_major.assert(predicate::path::exists());
+ bin_python_default.assert(predicate::path::exists());
+
+ // And 3.12 should be the default
+ if cfg!(unix) {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12"
+ );
+ });
+ } else {
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python"
+ );
+ });
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ insta::assert_snapshot!(
+ read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python"
+ );
+ });
+ }
+}
+
+#[test]
+fn python_install_default_preview() {
+ let context: TestContext = TestContext::new_with_versions(&[])
+ .with_filtered_python_keys()
+ .with_filtered_exe_suffix()
+ .with_managed_python_dirs();
+
+ let bin_python_minor_13 = context
+ .bin_dir
+ .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX));
+
+ let bin_python_major = context
+ .bin_dir
+ .child(format!("python3{}", std::env::consts::EXE_SUFFIX));
+
+ let bin_python_default = context
+ .bin_dir
+ .child(format!("python{}", std::env::consts::EXE_SUFFIX));
// Install a specific version
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r"
@@ -1342,7 +1812,7 @@ fn python_install_unknown() {
#[cfg(unix)]
#[test]
-fn python_install_preview_broken_link() {
+fn python_install_broken_link() {
use assert_fs::prelude::PathCreateDir;
use fs_err::os::unix::fs::symlink;
@@ -1358,7 +1828,7 @@ fn python_install_preview_broken_link() {
symlink(context.temp_dir.join("does-not-exist"), &bin_python).unwrap();
// Install
- uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r"
+ uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -1393,7 +1863,7 @@ fn python_install_default_from_env() {
----- stderr -----
Installed Python 3.12.11 in [TIME]
- + cpython-3.12.11-[PLATFORM]
+ + cpython-3.12.11-[PLATFORM] (python3.12)
");
// But prefer explicit requests
@@ -1404,7 +1874,7 @@ fn python_install_default_from_env() {
----- stderr -----
Installed Python 3.11.13 in [TIME]
- + cpython-3.11.13-[PLATFORM]
+ + cpython-3.11.13-[PLATFORM] (python3.11)
");
// We should ignore `UV_PYTHON` here and complain there is not a target
@@ -1431,8 +1901,8 @@ fn python_install_default_from_env() {
----- stderr -----
Searching for Python installations
Uninstalled 2 versions in [TIME]
- - cpython-3.11.13-[PLATFORM]
- - cpython-3.12.11-[PLATFORM]
+ - cpython-3.11.13-[PLATFORM] (python3.11)
+ - cpython-3.12.11-[PLATFORM] (python3.12)
");
// Uninstall with no targets should error
@@ -1516,8 +1986,6 @@ fn python_install_314() {
let context: TestContext = TestContext::new_with_versions(&[])
.with_filtered_python_keys()
.with_managed_python_dirs()
- .with_filtered_python_install_bin()
- .with_filtered_python_names()
.with_filtered_exe_suffix();
// Install 3.14
@@ -1529,7 +1997,7 @@ fn python_install_314() {
----- stderr -----
Installed Python 3.14.0b4 in [TIME]
- + cpython-3.14.0b4-[PLATFORM]
+ + cpython-3.14.0b4-[PLATFORM] (python3.14)
");
// Install a specific pre-release
@@ -1543,6 +2011,17 @@ fn python_install_314() {
+ cpython-3.14.0a4-[PLATFORM]
");
+ // Add name filtering for the `find` tests, we avoid it in `install` tests because it clobbers
+ // the version suffixes which matter in the install logs
+ let filters = context
+ .filters()
+ .iter()
+ .map(|(a, b)| ((*a).to_string(), (*b).to_string()))
+ .collect::<Vec<_>>();
+ let context = context
+ .with_filtered_python_install_bin()
+ .with_filtered_python_names();
+
// We should be able to find this version without opt-in, because there is no stable release
// installed
uv_snapshot!(context.filters(), context.python_find().arg("3.14"), @r"
@@ -1574,14 +2053,14 @@ fn python_install_314() {
");
// If we install a stable version, that should be preferred though
- uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r"
+ uv_snapshot!(filters, context.python_install().arg("3.13"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
uv_snapshot!(context.filters(), context.python_find().arg("3"), @r"
@@ -1621,15 +2100,15 @@ fn python_install_cached() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
let bin_python = context
.bin_dir
.child(format!("python3.13{}", std::env::consts::EXE_SUFFIX));
- // The executable should not be installed in the bin directory (requires preview)
- bin_python.assert(predicate::path::missing());
+ // The executable should be installed in the bin directory
+ bin_python.assert(predicate::path::exists());
// Should be a no-op when already installed
uv_snapshot!(context.filters(), context
@@ -1651,7 +2130,7 @@ fn python_install_cached() {
----- stderr -----
Searching for Python versions matching: Python 3.13
Uninstalled Python 3.13.5 in [TIME]
- - cpython-3.13.5-[PLATFORM]
+ - cpython-3.13.5-[PLATFORM] (python3.13)
");
// The cached archive can be installed offline
@@ -1665,7 +2144,7 @@ fn python_install_cached() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-[PLATFORM]
+ + cpython-3.13.5-[PLATFORM] (python3.13)
");
// 3.12 isn't cached, so it can't be installed
@@ -1714,7 +2193,7 @@ fn python_install_emulated_macos() {
----- stderr -----
Installed Python 3.13.5 in [TIME]
- + cpython-3.13.5-macos-x86_64-none
+ + cpython-3.13.5-macos-x86_64-none (python3.13)
");
// It should be discoverable with `uv python find`
diff --git a/docs/concepts/python-versions.md b/docs/concepts/python-versions.md
index ee18fa9da..0c16218d4 100644
--- a/docs/concepts/python-versions.md
+++ b/docs/concepts/python-versions.md
@@ -121,28 +121,17 @@ present, uv will install all the Python versions listed in the file.
### Installing Python executables
-!!! important
-
- Support for installing Python executables is in _preview_. This means the behavior is experimental
- and subject to change.
-
-To install Python executables into your `PATH`, provide the `--preview` option:
-
-```console
-$ uv python install 3.12 --preview
-```
-
-This will install a Python executable for the requested version into `~/.local/bin`, e.g., as
-`python3.12`.
+uv installs Python executables into your `PATH` by default, e.g., `uv python install 3.12` will
+install a Python executable into `~/.local/bin`, e.g., as `python3.12`.
!!! tip
If `~/.local/bin` is not in your `PATH`, you can add it with `uv tool update-shell`.
-To install `python` and `python3` executables, include the `--default` option:
+To install `python` and `python3` executables, include the experimental `--default` option:
```console
-$ uv python install 3.12 --default --preview
+$ uv python install 3.12 --default
```
When installing Python executables, uv will only overwrite an existing executable if it is managed
@@ -153,9 +142,9 @@ uv will update executables that it manages. However, it will prefer the latest p
Python minor version by default. For example:
```console
-$ uv python install 3.12.7 --preview # Adds `python3.12` to `~/.local/bin`
-$ uv python install 3.12.6 --preview # Does not update `python3.12`
-$ uv python install 3.12.8 --preview # Updates `python3.12` to point to 3.12.8
+$ uv python install 3.12.7 # Adds `python3.12` to `~/.local/bin`
+$ uv python install 3.12.6 # Does not update `python3.12`
+$ uv python install 3.12.8 # Updates `python3.12` to point to 3.12.8
```
## Upgrading Python versions
diff --git a/docs/guides/install-python.md b/docs/guides/install-python.md
index da841eac6..374ab29fd 100644
--- a/docs/guides/install-python.md
+++ b/docs/guides/install-python.md
@@ -24,17 +24,24 @@ $ uv python install
Python does not publish official distributable binaries. As such, uv uses distributions from the Astral [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone) project. See the [Python distributions](../concepts/python-versions.md#managed-python-distributions) documentation for more details.
-Once Python is installed, it will be used by `uv` commands automatically.
+Once Python is installed, it will be used by `uv` commands automatically. uv also adds the installed
+version to your `PATH`:
-!!! important
+```console
+$ python3.13
+```
- When Python is installed by uv, it will not be available globally (i.e. via the `python` command).
- Support for this feature is in _preview_. See [Installing Python executables](../concepts/python-versions.md#installing-python-executables)
- for details.
+uv only installs a _versioned_ executable by default. To install `python` and `python3` executables,
+include the experimental `--default` option:
- You can still use
- [`uv run`](../guides/scripts.md#using-different-python-versions) or
- [create and activate a virtual environment](../pip/environments.md) to use `python` directly.
+```console
+$ uv python install --default
+```
+
+!!! tip
+
+ See the documentation on [installing Python executables](../concepts/python-versions.md#installing-python-executables)
+ for more details.
## Installing a specific version
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 9be647449..4fc832cdb 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -2739,7 +2739,7 @@ Supports CPython and PyPy. CPython distributions are downloaded from the Astral
Python versions are installed into the uv Python directory, which can be retrieved with `uv python dir`.
-A `python` executable is not made globally available, managed Python versions are only used in uv commands or in active virtual environments. There is experimental support for adding Python executables to a directory on the path — use the `--preview` flag to enable this behavior and `uv python dir --bin` to retrieve the target directory.
+By default, Python executables are added to a directory on the path with a minor version suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use `uv python dir --bin` to see the target directory.
Multiple Python versions may be requested.
From cd40a3452295a8d4b6af69206c43282096507c89 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 13:38:02 -0500
Subject: [PATCH 075/130] Build and install workspace members that are
dependencies by default (#14663)
Regardless of the presence of a build system, as in
https://github.com/astral-sh/uv/pull/14413
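A minimal sketch of the new behavior (the root project name and workspace layout here are
illustrative; `bird-feeder` follows the workspace fixtures in this patch):

```toml
# Workspace root pyproject.toml (sketch)
[project]
name = "albatross"
version = "0.1.0"
dependencies = ["bird-feeder"]

[tool.uv.sources]
bird-feeder = { workspace = true }

[tool.uv.workspace]
members = ["packages/*"]
```

With this change, `bird-feeder` is built and installed as a package even if
`packages/bird-feeder/pyproject.toml` has no `[build-system]` table, unless it sets
`tool.uv.package = false`.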
---------
Co-authored-by: John Mumm
---
.../uv-distribution/src/metadata/lowering.rs | 8 +-
crates/uv-platform-tags/src/tags.rs | 2 +-
crates/uv-resolver/src/lock/mod.rs | 6 +-
crates/uv-workspace/src/pyproject.rs | 8 +-
crates/uv-workspace/src/workspace.rs | 125 ++++-
crates/uv/src/commands/build_frontend.rs | 4 +-
crates/uv/src/commands/project/lock.rs | 4 +
crates/uv/src/commands/project/lock_target.rs | 14 +-
crates/uv/src/commands/project/sync.rs | 2 +-
crates/uv/tests/it/edit.rs | 20 +-
crates/uv/tests/it/lock.rs | 459 +++++++++++++++++-
crates/uv/tests/it/lock_conflict.rs | 40 +-
crates/uv/tests/it/pip_compile.rs | 12 +-
crates/uv/tests/it/sync.rs | 100 +++-
docs/concepts/projects/dependencies.md | 54 ++-
15 files changed, 791 insertions(+), 67 deletions(-)
diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs
index c05ac4779..a8e899bb4 100644
--- a/crates/uv-distribution/src/metadata/lowering.rs
+++ b/crates/uv-distribution/src/metadata/lowering.rs
@@ -306,7 +306,10 @@ impl LoweredRequirement {
},
url,
}
- } else if member.pyproject_toml().is_package() {
+ } else if member
+ .pyproject_toml()
+ .is_package(!workspace.is_required_member(&requirement.name))
+ {
RequirementSource::Directory {
install_path: install_path.into_boxed_path(),
url,
@@ -736,7 +739,8 @@ fn path_source(
fs_err::read_to_string(&pyproject_path)
.ok()
.and_then(|contents| PyProjectToml::from_string(contents).ok())
- .and_then(|pyproject_toml| pyproject_toml.tool_uv_package())
+ // We don't require a build system for path dependencies
+ .map(|pyproject_toml| pyproject_toml.is_package(false))
.unwrap_or(true)
});
diff --git a/crates/uv-platform-tags/src/tags.rs b/crates/uv-platform-tags/src/tags.rs
index 7381f5dd5..f2c6d6cbb 100644
--- a/crates/uv-platform-tags/src/tags.rs
+++ b/crates/uv-platform-tags/src/tags.rs
@@ -771,7 +771,7 @@ mod tests {
/// A reference list can be generated with:
/// ```text
/// $ python -c "from packaging import tags; [print(tag) for tag in tags.platform_tags()]"`
- /// ````
+ /// ```
#[test]
fn test_platform_tags_manylinux() {
let tags = compatible_tags(&Platform::new(
diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs
index 7cbac67df..49cb851b3 100644
--- a/crates/uv-resolver/src/lock/mod.rs
+++ b/crates/uv-resolver/src/lock/mod.rs
@@ -1255,6 +1255,7 @@ impl Lock {
root: &Path,
packages: &BTreeMap<PackageName, WorkspaceMember>,
members: &[PackageName],
+ required_members: &BTreeSet<PackageName>,
requirements: &[Requirement],
constraints: &[Requirement],
overrides: &[Requirement],
@@ -1282,7 +1283,10 @@ impl Lock {
// Validate that the member sources have not changed (e.g., that they've switched from
// virtual to non-virtual or vice versa).
for (name, member) in packages {
- let expected = !member.pyproject_toml().is_package();
+ // We don't require a build system, if the workspace member is a dependency
+ let expected = !member
+ .pyproject_toml()
+ .is_package(!required_members.contains(name));
let actual = self
.find_by_name(name)
.ok()
diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs
index aa64c601e..4a994b801 100644
--- a/crates/uv-workspace/src/pyproject.rs
+++ b/crates/uv-workspace/src/pyproject.rs
@@ -66,7 +66,7 @@ pub struct PyProjectToml {
/// Used to determine whether a `build-system` section is present.
#[serde(default, skip_serializing)]
- build_system: Option,
+ pub build_system: Option,
}
impl PyProjectToml {
@@ -81,18 +81,18 @@ impl PyProjectToml {
/// Returns `true` if the project should be considered a Python package, as opposed to a
/// non-package ("virtual") project.
- pub fn is_package(&self) -> bool {
+ pub fn is_package(&self, require_build_system: bool) -> bool {
// If `tool.uv.package` is set, defer to that explicit setting.
if let Some(is_package) = self.tool_uv_package() {
return is_package;
}
// Otherwise, a project is assumed to be a package if `build-system` is present.
- self.build_system.is_some()
+ self.build_system.is_some() || !require_build_system
}
/// Returns the value of `tool.uv.package` if set.
- pub fn tool_uv_package(&self) -> Option<bool> {
+ fn tool_uv_package(&self) -> Option<bool> {
self.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs
index 8d09554d9..09f2b692a 100644
--- a/crates/uv-workspace/src/workspace.rs
+++ b/crates/uv-workspace/src/workspace.rs
@@ -20,7 +20,7 @@ use uv_warnings::warn_user_once;
use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroup, FlatDependencyGroups};
use crate::pyproject::{
- Project, PyProjectToml, PyprojectTomlError, Sources, ToolUvSources, ToolUvWorkspace,
+ Project, PyProjectToml, PyprojectTomlError, Source, Sources, ToolUvSources, ToolUvWorkspace,
};
type WorkspaceMembers = Arc<BTreeMap<PackageName, WorkspaceMember>>;
@@ -109,6 +109,8 @@ pub struct Workspace {
install_path: PathBuf,
/// The members of the workspace.
packages: WorkspaceMembers,
+ /// The workspace members that are required by other members.
+ required_members: BTreeSet<PackageName>,
/// The sources table from the workspace `pyproject.toml`.
///
/// This table is overridden by the project sources.
@@ -260,6 +262,7 @@ impl Workspace {
pyproject_toml: PyProjectToml,
) -> Option<Self> {
let mut packages = self.packages;
+
let member = Arc::make_mut(&mut packages).get_mut(package_name)?;
if member.root == self.install_path {
@@ -279,17 +282,33 @@ impl Workspace {
// Set the `pyproject.toml` for the member.
member.pyproject_toml = pyproject_toml;
+ // Recompute required_members with the updated data
+ let required_members = Self::collect_required_members(
+ &packages,
+ &workspace_sources,
+ &workspace_pyproject_toml,
+ );
+
Some(Self {
pyproject_toml: workspace_pyproject_toml,
sources: workspace_sources,
packages,
+ required_members,
..self
})
} else {
// Set the `pyproject.toml` for the member.
member.pyproject_toml = pyproject_toml;
- Some(Self { packages, ..self })
+ // Recompute required_members with the updated member data
+ let required_members =
+ Self::collect_required_members(&packages, &self.sources, &self.pyproject_toml);
+
+ Some(Self {
+ packages,
+ required_members,
+ ..self
+ })
}
}
@@ -303,7 +322,7 @@ impl Workspace {
/// Returns the set of all workspace members.
pub fn members_requirements(&self) -> impl Iterator<Item = Requirement> + '_ {
- self.packages.values().filter_map(|member| {
+ self.packages.iter().filter_map(|(name, member)| {
let url = VerbatimUrl::from_absolute_path(&member.root)
.expect("path is valid URL")
.with_given(member.root.to_string_lossy());
@@ -312,7 +331,10 @@ impl Workspace {
extras: Box::new([]),
groups: Box::new([]),
marker: MarkerTree::TRUE,
- source: if member.pyproject_toml.is_package() {
+ source: if member
+ .pyproject_toml()
+ .is_package(!self.is_required_member(name))
+ {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
editable: Some(true),
@@ -332,9 +354,65 @@ impl Workspace {
})
}
+ /// The workspace members that are required by another member of the workspace.
+ pub fn required_members(&self) -> &BTreeSet<PackageName> {
+ &self.required_members
+ }
+
+ /// Compute the workspace members that are required by another member of the workspace.
+ ///
+ /// N.B. this checks if a workspace member is required by inspecting `tool.uv.sources` entries,
+ /// but does not actually check if the source is _used_, which could result in false positives
+ /// but is easier to compute.
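+ /// For example, if the workspace root lists `bird-feeder = { workspace = true }` under
+ /// `[tool.uv.sources]`, then `bird-feeder` is counted as a required member even if no
+ /// dependency table in the root actually references it.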
+ fn collect_required_members(
+ packages: &BTreeMap<PackageName, WorkspaceMember>,
+ sources: &BTreeMap<PackageName, Sources>,
+ pyproject_toml: &PyProjectToml,
+ ) -> BTreeSet<PackageName> {
+ sources
+ .iter()
+ .filter(|(name, _)| {
+ pyproject_toml
+ .project
+ .as_ref()
+ .is_none_or(|project| project.name != **name)
+ })
+ .chain(
+ packages
+ .iter()
+ .filter_map(|(name, member)| {
+ member
+ .pyproject_toml
+ .tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.sources.as_ref())
+ .map(ToolUvSources::inner)
+ .map(move |sources| {
+ sources
+ .iter()
+ .filter(move |(source_name, _)| name != *source_name)
+ })
+ })
+ .flatten(),
+ )
+ .filter_map(|(package, sources)| {
+ sources
+ .iter()
+ .any(|source| matches!(source, Source::Workspace { .. }))
+ .then_some(package.clone())
+ })
+ .collect()
+ }
+
+ /// Whether a given workspace member is required by another member.
+ pub fn is_required_member(&self, name: &PackageName) -> bool {
+ self.required_members().contains(name)
+ }
+
/// Returns the set of all workspace member dependency groups.
pub fn group_requirements(&self) -> impl Iterator<Item = Requirement> + '_ {
- self.packages.values().filter_map(|member| {
+ self.packages.iter().filter_map(|(name, member)| {
let url = VerbatimUrl::from_absolute_path(&member.root)
.expect("path is valid URL")
.with_given(member.root.to_string_lossy());
@@ -368,7 +446,10 @@ impl Workspace {
extras: Box::new([]),
groups: groups.into_boxed_slice(),
marker: MarkerTree::TRUE,
- source: if member.pyproject_toml.is_package() {
+ source: if member
+ .pyproject_toml()
+ .is_package(!self.is_required_member(name))
+ {
RequirementSource::Directory {
install_path: member.root.clone().into_boxed_path(),
editable: Some(true),
@@ -746,9 +827,16 @@ impl Workspace {
.and_then(|uv| uv.index)
.unwrap_or_default();
+ let required_members = Self::collect_required_members(
+ &workspace_members,
+ &workspace_sources,
+ &workspace_pyproject_toml,
+ );
+
Ok(Workspace {
install_path: workspace_root,
packages: workspace_members,
+ required_members,
sources: workspace_sources,
indexes: workspace_indexes,
pyproject_toml: workspace_pyproject_toml,
@@ -1232,15 +1320,23 @@ impl ProjectWorkspace {
project.name.clone(),
current_project,
)]));
+ let workspace_sources = BTreeMap::default();
+ let required_members = Workspace::collect_required_members(
+ &current_project_as_members,
+ &workspace_sources,
+ project_pyproject_toml,
+ );
+
return Ok(Self {
project_root: project_path.clone(),
project_name: project.name.clone(),
workspace: Workspace {
install_path: project_path.clone(),
packages: current_project_as_members,
+ required_members,
// There may be package sources, but we don't need to duplicate them into the
// workspace sources.
- sources: BTreeMap::default(),
+ sources: workspace_sources,
indexes: Vec::default(),
pyproject_toml: project_pyproject_toml.clone(),
},
@@ -1692,6 +1788,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -1745,6 +1842,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -1825,6 +1923,10 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [
+ "bird-feeder",
+ "seeds"
+ ],
"sources": {
"bird-feeder": [
{
@@ -1946,6 +2048,10 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [
+ "bird-feeder",
+ "seeds"
+ ],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -2013,6 +2119,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -2147,6 +2254,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -2254,6 +2362,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -2375,6 +2484,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
@@ -2470,6 +2580,7 @@ mod tests {
"pyproject_toml": "[PYPROJECT_TOML]"
}
},
+ "required_members": [],
"sources": {},
"indexes": [],
"pyproject_toml": {
diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs
index fd6ed73d7..a830f7aef 100644
--- a/crates/uv/src/commands/build_frontend.rs
+++ b/crates/uv/src/commands/build_frontend.rs
@@ -263,7 +263,7 @@ async fn build_impl(
.get(package)
.ok_or_else(|| anyhow::anyhow!("Package `{package}` not found in workspace"))?;
- if !package.pyproject_toml().is_package() {
+ if !package.pyproject_toml().is_package(true) {
let name = &package.project().name;
let pyproject_toml = package.root().join("pyproject.toml");
return Err(anyhow::anyhow!(
@@ -300,7 +300,7 @@ async fn build_impl(
let packages: Vec<_> = workspace
.packages()
.values()
- .filter(|package| package.pyproject_toml().is_package())
+ .filter(|package| package.pyproject_toml().is_package(true))
.map(|package| AnnotatedSource {
source: Source::Directory(Cow::Borrowed(package.root())),
package: Some(package.project().name.clone()),
diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs
index 833e59a13..e23bd97c2 100644
--- a/crates/uv/src/commands/project/lock.rs
+++ b/crates/uv/src/commands/project/lock.rs
@@ -444,6 +444,7 @@ async fn do_lock(
// Collect the requirements, etc.
let members = target.members();
let packages = target.packages();
+ let required_members = target.required_members();
let requirements = target.requirements();
let overrides = target.overrides();
let constraints = target.constraints();
@@ -693,6 +694,7 @@ async fn do_lock(
target.install_path(),
packages,
&members,
+ required_members,
&requirements,
&dependency_groups,
&constraints,
@@ -906,6 +908,7 @@ impl ValidatedLock {
install_path: &Path,
packages: &BTreeMap<PackageName, WorkspaceMember>,
members: &[PackageName],
+ required_members: &BTreeSet<PackageName>,
requirements: &[Requirement],
dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
constraints: &[Requirement],
@@ -1117,6 +1120,7 @@ impl ValidatedLock {
install_path,
packages,
members,
+ required_members,
requirements,
constraints,
overrides,
diff --git a/crates/uv/src/commands/project/lock_target.rs b/crates/uv/src/commands/project/lock_target.rs
index 4618b3b84..55a726bf4 100644
--- a/crates/uv/src/commands/project/lock_target.rs
+++ b/crates/uv/src/commands/project/lock_target.rs
@@ -1,4 +1,4 @@
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use itertools::Either;
@@ -154,6 +154,18 @@ impl<'lock> LockTarget<'lock> {
}
}
+ /// Return the set of required workspace members, i.e., those that are required by other
+ /// members.
+ pub(crate) fn required_members(self) -> &'lock BTreeSet<PackageName> {
+ match self {
+ Self::Workspace(workspace) => workspace.required_members(),
+ Self::Script(_) => {
+ static EMPTY: BTreeSet<PackageName> = BTreeSet::new();
+ &EMPTY
+ }
+ }
+ }
+
/// Returns the set of supported environments for the [`LockTarget`].
pub(crate) fn environments(self) -> Option<&'lock SupportedEnvironments> {
match self {
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 40aa1b352..8d2dd9629 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -117,7 +117,7 @@ pub(crate) async fn sync(
// TODO(lucab): improve warning content
//
if project.workspace().pyproject_toml().has_scripts()
- && !project.workspace().pyproject_toml().is_package()
+ && !project.workspace().pyproject_toml().is_package(true)
{
warn_user!(
"Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system`"
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index 70b8d6e50..aa494435c 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -10362,7 +10362,7 @@ fn add_self() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject_toml, @r###"
+ pyproject_toml, @r#"
[project]
name = "anyio"
version = "0.1.0"
@@ -10377,7 +10377,7 @@ fn add_self() -> Result<()> {
[tool.uv.sources]
anyio = { workspace = true }
- "###
+ "#
);
});
@@ -10398,7 +10398,7 @@ fn add_self() -> Result<()> {
filters => context.filters(),
}, {
assert_snapshot!(
- pyproject_toml, @r###"
+ pyproject_toml, @r#"
[project]
name = "anyio"
version = "0.1.0"
@@ -10418,7 +10418,7 @@ fn add_self() -> Result<()> {
dev = [
"anyio[types]",
]
- "###
+ "#
);
});
@@ -13173,7 +13173,9 @@ fn add_path_with_existing_workspace() -> Result<()> {
----- stderr -----
Added `dep` to workspace members
Resolved 3 packages in [TIME]
- Audited in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dep==0.1.0 (from file://[TEMP_DIR]/dep)
");
let pyproject_toml = context.read("pyproject.toml");
@@ -13250,7 +13252,9 @@ fn add_path_with_workspace() -> Result<()> {
----- stderr -----
Added `dep` to workspace members
Resolved 2 packages in [TIME]
- Audited in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dep==0.1.0 (from file://[TEMP_DIR]/dep)
");
let pyproject_toml = context.read("pyproject.toml");
@@ -13316,7 +13320,9 @@ fn add_path_within_workspace_defaults_to_workspace() -> Result<()> {
----- stderr -----
Added `dep` to workspace members
Resolved 2 packages in [TIME]
- Audited in [TIME]
+ Prepared 1 package in [TIME]
+ Installed 1 package in [TIME]
+ + dep==0.1.0 (from file://[TEMP_DIR]/dep)
");
let pyproject_toml = context.read("pyproject.toml");
diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs
index 75d81b4c0..ff9b711b7 100644
--- a/crates/uv/tests/it/lock.rs
+++ b/crates/uv/tests/it/lock.rs
@@ -12064,10 +12064,6 @@ fn lock_remove_member() -> Result<()> {
requires-python = ">=3.12"
dependencies = ["leaf"]
- [build-system]
- requires = ["setuptools>=42"]
- build-backend = "setuptools.build_meta"
-
[tool.uv.workspace]
members = ["leaf"]
@@ -12130,7 +12126,7 @@ fn lock_remove_member() -> Result<()> {
[[package]]
name = "leaf"
version = "0.1.0"
- source = { virtual = "leaf" }
+ source = { editable = "leaf" }
dependencies = [
{ name = "anyio" },
]
@@ -12141,13 +12137,13 @@ fn lock_remove_member() -> Result<()> {
[[package]]
name = "project"
version = "0.1.0"
- source = { editable = "." }
+ source = { virtual = "." }
dependencies = [
{ name = "leaf" },
]
[package.metadata]
- requires-dist = [{ name = "leaf", virtual = "leaf" }]
+ requires-dist = [{ name = "leaf", editable = "leaf" }]
[[package]]
name = "sniffio"
@@ -12162,16 +12158,124 @@ fn lock_remove_member() -> Result<()> {
});
// Re-run with `--locked`.
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
- "###);
+ ");
- // Remove the member.
+ // Remove the member as a dependency (retain it as a workspace member)
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = ["leaf"]
+
+ [tool.uv.sources]
+ leaf = { workspace = true }
+ "#,
+ )?;
+
+ // Re-run with `--locked`. This should fail.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
+
+ // Re-run without `--locked`.
+ uv_snapshot!(context.filters(), context.lock(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ ");
+
+ let lock = context.read("uv.lock");
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [manifest]
+ members = [
+ "leaf",
+ "project",
+ ]
+
+ [[package]]
+ name = "anyio"
+ version = "4.3.0"
+ source = { registry = "https://pypi.org/simple" }
+ dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ ]
+ sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
+ ]
+
+ [[package]]
+ name = "idna"
+ version = "3.6"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
+ ]
+
+ [[package]]
+ name = "leaf"
+ version = "0.1.0"
+ source = { editable = "leaf" }
+ dependencies = [
+ { name = "anyio" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "anyio", specifier = ">3" }]
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+
+ [[package]]
+ name = "sniffio"
+ version = "1.3.1"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
+ ]
+ "#
+ );
+ });
+
+ // Remove the member entirely
pyproject_toml.write_str(
r#"
[project]
@@ -12238,7 +12342,7 @@ fn lock_remove_member() -> Result<()> {
/// This test would fail if we didn't write the list of workspace members to the lockfile, since
/// we wouldn't be able to determine that a new member was added.
#[test]
-fn lock_add_member() -> Result<()> {
+fn lock_add_member_with_build_system() -> Result<()> {
let context = TestContext::new("3.12");
// Create a workspace, but don't add the member.
@@ -12449,6 +12553,339 @@ fn lock_add_member() -> Result<()> {
Ok(())
}
+#[test]
+fn lock_add_member_without_build_system() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ // Create a workspace, but don't add the member.
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = []
+ "#,
+ )?;
+
+ uv_snapshot!(context.filters(), context.lock(), @r###"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 1 package in [TIME]
+ "###);
+
+ let lock = context.read("uv.lock");
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+ "#
+ );
+ });
+
+ // Re-run with `--locked`.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 1 package in [TIME]
+ "###);
+
+ // Create a workspace member.
+ let leaf = context.temp_dir.child("leaf");
+ leaf.child("pyproject.toml").write_str(
+ r#"
+ [project]
+ name = "leaf"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["anyio>3"]
+ "#,
+ )?;
+
+ // Add the member to the workspace, but not as a dependency of the root.
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = []
+
+ [tool.uv.workspace]
+ members = ["leaf"]
+ "#,
+ )?;
+
+ // Re-run with `--locked`. This should fail.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
+
+ // Re-run with `--offline`. This should also fail, during the resolve phase.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ × No solution found when resolving dependencies:
+ ╰─▶ Because anyio was not found in the cache and leaf depends on anyio>3, we can conclude that leaf's requirements are unsatisfiable.
+ And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable.
+
+ hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
+ "###);
+
+ // Re-run without `--locked`.
+ uv_snapshot!(context.filters(), context.lock(), @r###"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ Added anyio v4.3.0
+ Added idna v3.6
+ Added leaf v0.1.0
+ Added sniffio v1.3.1
+ "###);
+
+ // Re-run with `--locked`.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ "###);
+
+ let lock = context.read("uv.lock");
+
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [manifest]
+ members = [
+ "leaf",
+ "project",
+ ]
+
+ [[package]]
+ name = "anyio"
+ version = "4.3.0"
+ source = { registry = "https://pypi.org/simple" }
+ dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ ]
+ sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
+ ]
+
+ [[package]]
+ name = "idna"
+ version = "3.6"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
+ ]
+
+ [[package]]
+ name = "leaf"
+ version = "0.1.0"
+ source = { virtual = "leaf" }
+ dependencies = [
+ { name = "anyio" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "anyio", specifier = ">3" }]
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+
+ [[package]]
+ name = "sniffio"
+ version = "1.3.1"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
+ ]
+ "#
+ );
+ });
+
+ // Add the member to the workspace, as a dependency of the root.
+ pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "project"
+ version = "0.1.0"
+ requires-python = ">=3.12"
+ dependencies = ["leaf"]
+
+ [tool.uv.workspace]
+ members = ["leaf"]
+
+ [tool.uv.sources]
+ leaf = { workspace = true }
+ "#,
+ )?;
+
+ // Re-run with `--locked`. This should fail.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
+ success: false
+ exit_code: 1
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
+ ");
+
+ // Re-run without `--locked`.
+ uv_snapshot!(context.filters(), context.lock(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ ");
+
+ // Re-run with `--locked`.
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 5 packages in [TIME]
+ ");
+
+ let lock = context.read("uv.lock");
+
+ // It should change from a virtual to an editable source
+ insta::with_settings!({
+ filters => context.filters(),
+ }, {
+ assert_snapshot!(
+ lock, @r#"
+ version = 1
+ revision = 2
+ requires-python = ">=3.12"
+
+ [options]
+ exclude-newer = "2024-03-25T00:00:00Z"
+
+ [manifest]
+ members = [
+ "leaf",
+ "project",
+ ]
+
+ [[package]]
+ name = "anyio"
+ version = "4.3.0"
+ source = { registry = "https://pypi.org/simple" }
+ dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ ]
+ sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
+ ]
+
+ [[package]]
+ name = "idna"
+ version = "3.6"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
+ ]
+
+ [[package]]
+ name = "leaf"
+ version = "0.1.0"
+ source = { editable = "leaf" }
+ dependencies = [
+ { name = "anyio" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "anyio", specifier = ">3" }]
+
+ [[package]]
+ name = "project"
+ version = "0.1.0"
+ source = { virtual = "." }
+ dependencies = [
+ { name = "leaf" },
+ ]
+
+ [package.metadata]
+ requires-dist = [{ name = "leaf", editable = "leaf" }]
+
+ [[package]]
+ name = "sniffio"
+ version = "1.3.1"
+ source = { registry = "https://pypi.org/simple" }
+ sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
+ wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
+ ]
+ "#
+ );
+ });
+
+ Ok(())
+}
+
/// Lock a `pyproject.toml`, then add a dependency that's already included in the resolution.
/// In theory, we shouldn't need to re-resolve, but based on our current strategy, we don't accept
/// the existing lockfile.
diff --git a/crates/uv/tests/it/lock_conflict.rs b/crates/uv/tests/it/lock_conflict.rs
index bf1bc1eac..d67736c88 100644
--- a/crates/uv/tests/it/lock_conflict.rs
+++ b/crates/uv/tests/it/lock_conflict.rs
@@ -1094,18 +1094,19 @@ fn extra_unconditional() -> Result<()> {
"###);
// This is fine because we are only enabling one
// extra, and thus, there is no conflict.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Prepared 3 packages in [TIME]
- Installed 3 packages in [TIME]
+ Prepared 4 packages in [TIME]
+ Installed 4 packages in [TIME]
+ anyio==4.1.0
+ idna==3.6
+ + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1)
+ sniffio==1.3.1
- "###);
+ ");
// And same thing for the other extra.
root_pyproject_toml.write_str(
@@ -1215,18 +1216,19 @@ fn extra_unconditional_non_conflicting() -> Result<()> {
// `uv sync` wasn't correctly propagating extras in a way
// that would satisfy the conflict markers that got added
// to the `proxy1[extra1]` dependency.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Prepared 3 packages in [TIME]
- Installed 3 packages in [TIME]
+ Prepared 4 packages in [TIME]
+ Installed 4 packages in [TIME]
+ anyio==4.1.0
+ idna==3.6
+ + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1)
+ sniffio==1.3.1
- "###);
+ ");
Ok(())
}
@@ -1301,16 +1303,17 @@ fn extra_unconditional_in_optional() -> Result<()> {
"###);
// This should install `sortedcontainers==2.3.0`.
- uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x1"), @r###"
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x1"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
- Prepared 1 package in [TIME]
- Installed 1 package in [TIME]
+ Prepared 2 packages in [TIME]
+ Installed 2 packages in [TIME]
+ + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1)
+ sortedcontainers==2.3.0
- "###);
+ ");
// This should install `sortedcontainers==2.4.0`.
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x2"), @r###"
@@ -4460,19 +4463,20 @@ conflicts = [
error: Extra `x2` is not defined in the project's `optional-dependencies` table
"###);
- uv_snapshot!(context.filters(), context.sync(), @r###"
+ uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
- Prepared 3 packages in [TIME]
- Installed 3 packages in [TIME]
+ Prepared 4 packages in [TIME]
+ Installed 4 packages in [TIME]
+ anyio==4.3.0
+ idna==3.6
+ + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1)
+ sniffio==1.3.1
- "###);
+ ");
let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock")).unwrap();
insta::with_settings!({
@@ -4558,14 +4562,14 @@ conflicts = [
requires-dist = [
{ name = "anyio", specifier = ">=4" },
{ name = "idna", marker = "extra == 'x1'", specifier = "==3.6" },
- { name = "proxy1", virtual = "proxy1" },
+ { name = "proxy1", editable = "proxy1" },
]
provides-extras = ["x1"]
[[package]]
name = "proxy1"
version = "0.1.0"
- source = { virtual = "proxy1" }
+ source = { editable = "proxy1" }
[package.optional-dependencies]
x2 = [
diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs
index ac3549874..69da12fd6 100644
--- a/crates/uv/tests/it/pip_compile.rs
+++ b/crates/uv/tests/it/pip_compile.rs
@@ -15772,18 +15772,18 @@ fn project_and_group_workspace_inherit() -> Result<()> {
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --group packages/mysubproject/pyproject.toml:foo
+ -e file://[TEMP_DIR]/packages/pytest
+ # via mysubproject (packages/mysubproject/pyproject.toml:foo)
+ -e file://[TEMP_DIR]/packages/sniffio
+ # via
+ # mysubproject (packages/mysubproject/pyproject.toml:foo)
+ # anyio
anyio==4.3.0
# via mysubproject (packages/mysubproject/pyproject.toml:foo)
idna==3.6
# via anyio
iniconfig==2.0.0
# via mysubproject (packages/mysubproject/pyproject.toml:foo)
- pytest @ file://[TEMP_DIR]/packages/pytest
- # via mysubproject (packages/mysubproject/pyproject.toml:foo)
- sniffio @ file://[TEMP_DIR]/packages/sniffio
- # via
- # mysubproject (packages/mysubproject/pyproject.toml:foo)
- # anyio
----- stderr -----
Resolved 5 packages in [TIME]
diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs
index bb3546e22..5a8d79447 100644
--- a/crates/uv/tests/it/sync.rs
+++ b/crates/uv/tests/it/sync.rs
@@ -3565,6 +3565,101 @@ fn sync_ignore_extras_check_when_no_provides_extras() -> Result<()> {
Ok(())
}
+#[test]
+fn sync_workspace_members_with_transitive_dependencies() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let pyproject_toml = context.temp_dir.child("pyproject.toml");
+ pyproject_toml.write_str(
+ r#"
+ [tool.uv.workspace]
+ members = [
+ "packages/*",
+ ]
+ "#,
+ )?;
+
+ let packages = context.temp_dir.child("packages");
+ packages.create_dir_all()?;
+
+ // Create three workspace members with transitive dependency from
+ // pkg-c -> pkg-b -> pkg-a
+ let pkg_a = packages.child("pkg-a");
+ pkg_a.create_dir_all()?;
+ let pkg_a_pyproject_toml = pkg_a.child("pyproject.toml");
+ pkg_a_pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "pkg-a"
+ version = "0.0.1"
+ requires-python = ">=3.12"
+ dependencies = ["anyio"]
+ "#,
+ )?;
+
+ let pkg_b = packages.child("pkg-b");
+ pkg_b.create_dir_all()?;
+ let pkg_b_pyproject_toml = pkg_b.child("pyproject.toml");
+ pkg_b_pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "pkg-b"
+ version = "0.0.1"
+ requires-python = ">=3.12"
+ dependencies = ["pkg-a"]
+
+ [tool.uv.sources]
+ pkg-a = { workspace = true }
+ "#,
+ )?;
+
+ let pkg_c = packages.child("pkg-c");
+ pkg_c.create_dir_all()?;
+ let pkg_c_pyproject_toml = pkg_c.child("pyproject.toml");
+ pkg_c_pyproject_toml.write_str(
+ r#"
+ [project]
+ name = "pkg-c"
+ version = "0.0.1"
+ requires-python = ">=3.12"
+ dependencies = ["pkg-b"]
+
+ [tool.uv.sources]
+ pkg-b = { workspace = true }
+ "#,
+ )?;
+
+ // Syncing should build the two transitive dependencies pkg-a and pkg-b,
+ // but not pkg-c, which is not a dependency.
+ uv_snapshot!(context.filters(), context.sync(), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 6 packages in [TIME]
+ Prepared 5 packages in [TIME]
+ Installed 5 packages in [TIME]
+ + anyio==4.3.0
+ + idna==3.6
+ + pkg-a==0.0.1 (from file://[TEMP_DIR]/packages/pkg-a)
+ + pkg-b==0.0.1 (from file://[TEMP_DIR]/packages/pkg-b)
+ + sniffio==1.3.1
+ ");
+
+ // The lockfile should be valid.
+ uv_snapshot!(context.filters(), context.lock().arg("--check"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Resolved 6 packages in [TIME]
+ ");
+
+ Ok(())
+}
+
#[test]
fn sync_non_existent_extra_workspace_member() -> Result<()> {
let context = TestContext::new("3.12");
@@ -3626,9 +3721,10 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> {
----- stderr -----
Resolved 5 packages in [TIME]
- Prepared 3 packages in [TIME]
- Installed 3 packages in [TIME]
+ Prepared 4 packages in [TIME]
+ Installed 4 packages in [TIME]
+ anyio==4.3.0
+ + child==0.1.0 (from file://[TEMP_DIR]/child)
+ idna==3.6
+ sniffio==1.3.1
");
diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md
index bf11e7174..52a71fd04 100644
--- a/docs/concepts/projects/dependencies.md
+++ b/docs/concepts/projects/dependencies.md
@@ -808,9 +808,9 @@ $ uv add --no-editable ./path/foo
uv allows dependencies to be "virtual", in which the dependency itself is not installed as a
[package](./config.md#project-packaging), but its dependencies are.
-By default, only workspace members without build systems declared are virtual.
+By default, dependencies are never virtual.
-A dependency with a [`path` source](#path) is not virtual unless it explicitly sets
+A dependency with a [`path` source](#path) can be virtual if it explicitly sets
[`tool.uv.package = false`](../../reference/settings.md#package). Unlike working _in_ the dependent
project with uv, the package will be built even if a [build system](./config.md#build-systems) is
not declared.
@@ -825,8 +825,8 @@ dependencies = ["bar"]
bar = { path = "../projects/bar", package = false }
```
-Similarly, if a dependency sets `tool.uv.package = false`, it can be overridden by declaring
-`package = true` on the source:
+If a dependency sets `tool.uv.package = false`, it can be overridden by declaring `package = true`
+on the source:
```toml title="pyproject.toml"
[project]
@@ -836,6 +836,52 @@ dependencies = ["bar"]
bar = { path = "../projects/bar", package = true }
```
+Similarly, a dependency with a [`workspace` source](#workspace-member) can be virtual if it
+explicitly sets [`tool.uv.package = false`](../../reference/settings.md#package). The workspace
+member will be built even if a [build system](./config.md#build-systems) is not declared.
+
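+For example, a workspace member that is depended on by another member can stay virtual by setting
+`tool.uv.package = false` in its own `pyproject.toml` (the member name and version here are
+illustrative):
+
+```toml title="pyproject.toml"
+[project]
+name = "bar"
+version = "1.0.0"
+dependencies = []
+
+[tool.uv]
+package = false
+```
+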
+Workspace members that are _not_ dependencies of another member are virtual by default (when they
+do not declare a build system), e.g., if the parent `pyproject.toml` is:
+
+```toml title="pyproject.toml"
+[project]
+name = "parent"
+version = "1.0.0"
+dependencies = []
+
+[tool.uv.workspace]
+members = ["child"]
+```
+
+And the child `pyproject.toml` does not declare a build system:
+
+```toml title="pyproject.toml"
+[project]
+name = "child"
+version = "1.0.0"
+dependencies = ["anyio"]
+```
+
+Then the `child` workspace member would not be installed, but the transitive dependency `anyio`
+would be.
+
+In contrast, if the parent declared a dependency on `child`:
+
+```toml title="pyproject.toml"
+[project]
+name = "parent"
+version = "1.0.0"
+dependencies = ["child"]
+
+[tool.uv.sources]
+child = { workspace = true }
+
+[tool.uv.workspace]
+members = ["child"]
+```
+
+Then `child` would be built and installed.
+
## Dependency specifiers
uv uses standard
From 5b716c4e50f2a4e829de3f474125cdac8de11c79 Mon Sep 17 00:00:00 2001
From: konsti
Date: Thu, 17 Jul 2025 22:37:35 +0200
Subject: [PATCH 076/130] Add missing trailing newline to outdated error
(#14689)
Unlike the other branch in match, which uses a fully formatted error, we
need to print the newline ourselves.
Before (top) and after (bottom):
---
crates/uv/src/commands/diagnostics.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs
index 02412d683..f24aa3406 100644
--- a/crates/uv/src/commands/diagnostics.rs
+++ b/crates/uv/src/commands/diagnostics.rs
@@ -128,7 +128,7 @@ impl OperationDiagnostic {
None
}
pip::operations::Error::OutdatedEnvironment => {
- anstream::eprint!("{}", err);
+ anstream::eprintln!("{}", err);
None
}
err => Some(err),
From ac35377132954c9ee0fd60d5a464dab4490966c0 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 15:52:31 -0500
Subject: [PATCH 077/130] Fix rendering of `uv venv --clear` hint in bash
(#14691)
Closes https://github.com/astral-sh/uv/issues/14688
---
crates/uv-console/src/lib.rs | 3 +++
1 file changed, 3 insertions(+)
diff --git a/crates/uv-console/src/lib.rs b/crates/uv-console/src/lib.rs
index 24c5eea16..1da7efdf1 100644
--- a/crates/uv-console/src/lib.rs
+++ b/crates/uv-console/src/lib.rs
@@ -84,6 +84,9 @@ fn confirm_inner(
if hint.is_some() {
term.clear_last_lines(2)?;
+ // It's not clear why we need to clear to the end of the screen here, but it fixes lingering
+ // display of the hint on `bash` (the issue did not reproduce on `zsh`).
+ term.clear_to_end_of_screen()?;
} else {
term.clear_line()?;
}
From 1a339b76e841188a04af9dbd1ad87136cd301122 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 17:07:48 -0500
Subject: [PATCH 078/130] Add release notes and bump version for 0.8.0
(#14690)
[Rendered](https://github.com/astral-sh/uv/blob/zb/release-notes/CHANGELOG.md)
---
CHANGELOG.md | 197 +++++++++++++++++---
Cargo.lock | 6 +-
crates/uv-build-backend/src/lib.rs | 2 +-
crates/uv-build/Cargo.toml | 2 +-
crates/uv-build/pyproject.toml | 2 +-
crates/uv-version/Cargo.toml | 2 +-
crates/uv/Cargo.toml | 2 +-
docs/concepts/build-backend.md | 2 +-
docs/getting-started/installation.md | 4 +-
docs/guides/integration/aws-lambda.md | 4 +-
docs/guides/integration/docker.md | 12 +-
docs/guides/integration/github.md | 2 +-
docs/guides/integration/pre-commit.md | 10 +-
scripts/packages/built-by-uv/pyproject.toml | 2 +-
14 files changed, 199 insertions(+), 50 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 87cf0c9e8..351edc326 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,155 @@
+## 0.8.0
+
+Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.
+
+This release also includes the stabilization of a couple `uv python install` features, which have been available under preview since late last year.
+
+### Breaking changes
+
+- **Install Python executables into a directory on the `PATH` ([#14626](https://github.com/astral-sh/uv/pull/14626))**
+
+ `uv python install` now installs a versioned Python executable (e.g., `python3.13`) into a directory on the `PATH` (e.g., `~/.local/bin`) by default. This behavior has been available under the `--preview` flag since [Oct 2024](https://github.com/astral-sh/uv/pull/8458). This change should not be breaking unless it shadows a Python executable elsewhere on the `PATH`.
+
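+ For example (a sketch; the exact version and install directory depend on your system):
+
+ ```console
+ $ uv python install 3.13
+ $ python3.13 --version
+ ```
+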
+ To install unversioned executables, i.e., `python3` and `python`, use the `--default` flag. The `--default` flag has also been in preview, but is not stabilized in this release.
+
+ Note that these executables point to the base Python installation and only include the standard library. That means they will not include dependencies from your current project (use `uv run python` instead) and you cannot install packages into their environment (use `uvx --with python` instead).
+
+ As with tool installation, the target directory respects common variables like `XDG_BIN_HOME` and can be overridden with a `UV_PYTHON_BIN_DIR` variable.
+
+ You can opt out of this behavior with `uv python install --no-bin` or `UV_PYTHON_INSTALL_BIN=0`.
+
+ See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details.
+
+- **Register Python versions with the Windows registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**
+
+ `uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt in.
+
+ You can opt out of this behavior with `uv python install --no-registry` or `UV_PYTHON_INSTALL_REGISTRY=0`.
+
+- **Prompt before removing an existing directory in `uv venv` ([#14309](https://github.com/astral-sh/uv/pull/14309))**
+
+ Previously, `uv venv` would remove an existing virtual environment without confirmation. While this is consistent with the behavior of project commands (e.g., `uv sync`), it's surprising to users that are using imperative workflows (i.e., `uv pip`). Now, `uv venv` will prompt for confirmation before removing an existing virtual environment. **If not in an interactive context, uv will still remove the virtual environment for backwards compatibility. However, this behavior is likely to change in a future release.**
+
+ The behavior for other commands (e.g., `uv sync`) is unchanged.
+
+ You can opt out of this behavior by setting `UV_VENV_CLEAR=1` or passing the `--clear` flag.
+
+- **Validate that discovered interpreters meet the Python preference ([#7934](https://github.com/astral-sh/uv/pull/7934))**
+
+ uv allows opting out of its managed Python versions with the `--no-managed-python` and `python-preference` options.
+
+ Previously, uv would not enforce this option for Python interpreters discovered on the `PATH`. For example, if a symlink to a managed Python interpreter was created, uv would allow it to be used even if `--no-managed-python` was provided. Now, uv ignores Python interpreters that do not match the Python preference _unless_ they are in an active virtual environment or are explicitly requested, e.g., with `--python /path/to/python3.13`.
+
+ Similarly, uv would previously not invalidate existing project environments if they did not match the Python preference. Now, uv will invalidate and recreate project environments when the Python preference changes.
+
+ You can opt out of this behavior by providing the explicit path to the Python interpreter providing `--managed-python` / `--no-managed-python` matching the interpreter you want.
+
+- **Install dependencies without build systems when they are `path` sources ([#14413](https://github.com/astral-sh/uv/pull/14413))**
+
+ When working on a project, uv uses the [presence of a build system](https://docs.astral.sh/uv/concepts/projects/config/#build-systems) to determine if it should be built and installed into the environment. However, when a project is a dependency of another project, it can be surprising for the dependency to be missing from the environment.
+
+ Previously, uv would not build and install dependencies with [`path` sources](https://docs.astral.sh/uv/concepts/projects/dependencies/#path) unless they declared a build system or set `tool.uv.package = true`. Now, dependencies with `path` sources are built and installed regardless of the presence of a build system. If a build system is not present, the `setuptools.build_meta:__legacy__` backend will be used (per [PEP 517](https://peps.python.org/pep-0517/#source-trees)).
+
+ You can opt out of this behavior by setting `package = false` in the source declaration, e.g.:
+
+ ```toml
+ [tool.uv.sources]
+ foo = { path = "./foo", package = false }
+ ```
+
+ Or, by setting `tool.uv.package = false` in the dependent `pyproject.toml`.
+
+ See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.
+
+- **Install dependencies without build systems when they are workspace members ([#14663](https://github.com/astral-sh/uv/pull/14663))**
+
+ As described above for dependencies with `path` sources, uv previously would not build and install workspace members that did not declare a build system. Now, uv will build and install workspace members that are a dependency of _another_ workspace member regardless of the presence of a build system. The behavior is unchanged for workspace members that are not included in the `project.dependencies`, `project.optional-dependencies`, or `dependency-groups` tables of another workspace member.
+
+ You can opt out of this behavior by setting `tool.uv.package = false` in the workspace member's `pyproject.toml`.
+
+ See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.
+
+- **Bump `--python-platform linux` to `manylinux_2_28` ([#14300](https://github.com/astral-sh/uv/pull/14300))**
+
+ uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets.
+
+ Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2017 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).
+
+ This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version.
+
+ You can opt out of this behavior by using `--python-platform x86_64-manylinux_2_17` instead.
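+
+ For example, a sketch of opting back into the older baseline during platform-specific resolution
+ (file names are illustrative):
+
+ ```console
+ $ uv pip compile requirements.in -o requirements.txt --python-platform x86_64-manylinux_2_17
+ ```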
+
+- **Remove `uv version` fallback ([#14161](https://github.com/astral-sh/uv/pull/14161))**
+
+ In [Apr 2025](https://github.com/astral-sh/uv/pull/12349), uv changed the `uv version` command to an interface for viewing and updating the version of the current project. However, when outside a project, `uv version` would continue to display uv's version for backwards compatibility. Now, when used outside of a project, `uv version` will fail.
+
+ You cannot opt out of this behavior. Use `uv self version` instead.
+
+- **Require `--global` for removal of the global Python pin ([#14169](https://github.com/astral-sh/uv/pull/14169))**
+
+ Previously, `uv python pin --rm` would allow you to remove the global Python pin without opt in. Now, uv requires the `--global` flag to remove the global Python pin.
+
+ You cannot opt out of this behavior. Use the `--global` flag instead.
+
+- **Support conflicting editable settings across groups ([#14197](https://github.com/astral-sh/uv/pull/14197))**
+
+ Previously, uv would always treat a package as editable if any requirement requested it as editable. However, this prevented users from declaring `path` sources that toggled the `editable` setting across dependency groups. Now, uv allows declaring different `editable` values for conflicting groups. However, if a project includes a path dependency twice, once with `editable = true` and once without any editable annotation, those are now considered conflicting, and uv will exit with an error.
+
+ You cannot opt out of this behavior. Use consistent `editable` settings or [mark groups as conflicting](https://docs.astral.sh/uv/concepts/projects/config/#conflicting-dependencies).
+
+- **Make `uv_build` the default build backend in `uv init` ([#14661](https://github.com/astral-sh/uv/pull/14661))**
+
+ The uv build backend (`uv_build`) was [stabilized in uv 0.7.19](https://github.com/astral-sh/uv/releases/tag/0.7.19). Now, it is the default build backend for `uv init --package` and `uv init --lib`. Previously, `hatchling` was the default build backend. A build backend is still not used without opt-in in `uv init`, but we expect to change this in a future release.
+
+ You can opt out of this behavior with `uv init --build-backend hatchling`.
+
+- **Set default `UV_TOOL_BIN_DIR` on Docker images ([#13391](https://github.com/astral-sh/uv/pull/13391))**
+
+ Previously, `UV_TOOL_BIN_DIR` was not set in Docker images which meant that `uv tool install` did not install tools into a directory on the `PATH` without additional configuration. Now, `UV_TOOL_BIN_DIR` is set to `/usr/local/bin` in all Docker derived images.
+
+ When the default image user is overridden (e.g. `USER `) with a less privileged user, this may cause `uv tool install` to fail.
+
+ You can opt out of this behavior by setting an alternative `UV_TOOL_BIN_DIR`.
+
+- **Update `--check` to return an exit code of 1 ([#14167](https://github.com/astral-sh/uv/pull/14167))**
+
+ uv uses an exit code of 1 to indicate a "successful failure" and an exit code of 2 to indicate an "error".
+
+ Previously, `uv lock --check` and `uv sync --check` would exit with a code of 2 when the lockfile or environment were outdated. Now, uv will exit with a code of 1.
+
+ You cannot opt out of this behavior.
+
+- **Use an ephemeral environment for `uv run --with` invocations ([#14447](https://github.com/astral-sh/uv/pull/14447))**
+
+ When using `uv run --with`, uv layers the requirements requested using `--with` into another virtual environment and caches it. Previously, uv would invoke the Python interpreter in this layered environment. However, this allows poisoning the cached environment and introduces race conditions for concurrent invocations. Now, uv will layer _another_ empty virtual environment on top of the cached environment and invoke the Python interpreter there. This should only cause breakage in cases where the environment is being inspected at runtime.
+
+ You cannot opt out of this behavior.
+
+- **Restructure the `uv venv` command output and exit codes ([#14546](https://github.com/astral-sh/uv/pull/14546))**
+
+ Previously, uv used `miette` to format the `uv venv` output. However, this was inconsistent with most of the uv CLI. Now, the output is a little different and the exit code has switched from 1 to 2 for some error cases.
+
+ You cannot opt out of this behavior.
+
+- **Default to `--workspace` when adding subdirectories ([#14529](https://github.com/astral-sh/uv/pull/14529))**
+
+ When using `uv add` to add a subdirectory in a workspace, uv now defaults to adding the target as a workspace member.
+
+ You can opt out of this behavior by providing `--no-workspace`.
+
+- **Add missing validations for disallowed `uv.toml` fields ([#14322](https://github.com/astral-sh/uv/pull/14322))**
+
+ uv does not allow some settings in the `uv.toml`. Previously, some settings were silently ignored when present in the `uv.toml`. Now, uv will error.
+
+ You cannot opt out of this behavior. Use `--no-config` or remove the invalid settings.
+
+### Configuration
+
+- Add support for toggling Python bin and registry install options via env vars ([#14662](https://github.com/astral-sh/uv/pull/14662))
+
## 0.7.22
### Python
@@ -153,7 +302,7 @@ See the [python-build-standalone release](https://github.com/astral-sh/python-bu
### Python
- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14
-
+
These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows.
However, they can be requested with `cpython--windows-aarch64`.
@@ -633,11 +782,11 @@ This release contains various changes that improve correctness and user experien
### Breaking changes
- **Update `uv version` to display and update project versions ([#12349](https://github.com/astral-sh/uv/pull/12349))**
-
+
Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the project's version. This interface was [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we decided that transitioning the top-level command was the best option.
-
+
Here's a brief example:
-
+
```console
$ uv init example
Initialized project `example` at `./example`
@@ -649,72 +798,72 @@ This release contains various changes that improve correctness and user experien
$ uv version --short
1.0.0
```
-
+
If used outside of a project, uv will fallback to showing its own version still:
-
+
```console
$ uv version
warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
running `uv self version` for compatibility with old `uv version` command.
this fallback will be removed soon, pass `--preview` to make this an error.
-
+
uv 0.7.0 (4433f41c9 2025-04-29)
```
-
+
As described in the warning, `--preview` can be used to error instead:
-
+
```console
$ uv version --preview
error: No `pyproject.toml` found in current directory or any parent directory
```
-
+
The previous functionality of `uv version` was moved to `uv self version`.
- **Avoid fallback to subsequent indexes on authentication failure ([#12805](https://github.com/astral-sh/uv/pull/12805))**
-
+
When using the `first-index` strategy (the default), uv will stop searching indexes for a package once it is found on a single index. Previously, uv considered a package as "missing" from an index during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are represented by an HTTP 404). This behavior was motivated by unusual responses from some package indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will consider an authentication failure as a stop-point when searching for a package across indexes. The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:
-
+
```toml
[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
ignore-error-codes = [401, 403]
```
-
+
Since PyTorch's indexes always return a HTTP 403 for missing packages, uv special-cases indexes on the `pytorch.org` domain to ignore that error code by default.
- **Require the command in `uvx ` to be available in the Python environment ([#11603](https://github.com/astral-sh/uv/pull/11603))**
-
+
Previously, `uvx` would attempt to execute a command even if it was not provided by a Python package. For example, if we presume `foo` is an empty Python package which provides no command, `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if the `foo` executable is not provided by the requested Python package. This check is not enforced when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of `foo` itself, as this is fairly common for packages which depend on a dedicated package for their command-line interface.
- **Use index URL instead of package URL for keyring credential lookups ([#12651](https://github.com/astral-sh/uv/pull/12651))**
-
+
When determining credentials for querying a package URL, uv previously sent the full URL to the `keyring` command. However, some keyring plugins expect to receive the *index URL* (which is usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This behavior matches `pip`.
- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**
-
+
Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`. However, the `--version` flag is useful for other operations since uv is a package manager. Consequently, we've removed the `--version` flag from subcommands — it is only available as `uv --version`.
- **Omit Python 3.7 downloads from managed versions ([#13022](https://github.com/astral-sh/uv/pull/13022))**
-
+
Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available for download on a subset of platforms.
- **Reject non-PEP 751 TOML files in install, compile, and export commands ([#13120](https://github.com/astral-sh/uv/pull/13120), [#13119](https://github.com/astral-sh/uv/pull/13119))**
-
+
Previously, uv treated arbitrary `.toml` files passed to commands (e.g., `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for custom names instead, e.g., `pylock.foo.toml`.
- **Ignore arbitrary Python requests in version files ([#12909](https://github.com/astral-sh/uv/pull/12909))**
-
+
uv allows arbitrary strings to be used for Python version requests, in which they are treated as an executable name to search for in the `PATH`. However, using this form of request in `.python-version` files is non-standard and conflicts with `pyenv-virtualenv` which writes environment names to `.python-version` files. In this release, uv will now ignore requests that are arbitrary strings when found in `.python-version` files.
- **Error on unknown dependency object specifiers ([12811](https://github.com/astral-sh/uv/pull/12811))**
-
+
The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:
-
+
```toml
[dependency-groups]
foo = ["pyparsing"]
bar = [{set-phasers-to = "stun"}]
```
-
+
However, the only current spec-compliant object specifier is `include-group`. Previously, uv would ignore unknown object specifiers. Now, uv will error.
- **Make `--frozen` and `--no-sources` conflicting options ([#12671](https://github.com/astral-sh/uv/pull/12671))**
-
+
Using `--no-sources` always requires a new resolution and `--frozen` will always fail when used with it. Now, this conflict is encoded in the CLI options for clarity.
- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset ([#12907](https://github.com/astral-sh/uv/pull/12907), [#12905](https://github.com/astral-sh/uv/pull/12905))**
-
+
Previously, these variables were treated as set to the current working directory when set to an empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other environment variables which configure directories.
### Enhancements
diff --git a/Cargo.lock b/Cargo.lock
index 0900699cb..78429b08f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4633,7 +4633,7 @@ dependencies = [
[[package]]
name = "uv"
-version = "0.7.22"
+version = "0.8.0"
dependencies = [
"anstream",
"anyhow",
@@ -4799,7 +4799,7 @@ dependencies = [
[[package]]
name = "uv-build"
-version = "0.7.22"
+version = "0.8.0"
dependencies = [
"anyhow",
"uv-build-backend",
@@ -5992,7 +5992,7 @@ dependencies = [
[[package]]
name = "uv-version"
-version = "0.7.22"
+version = "0.8.0"
[[package]]
name = "uv-virtualenv"
diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs
index 2ec11aeeb..8add8dda3 100644
--- a/crates/uv-build-backend/src/lib.rs
+++ b/crates/uv-build-backend/src/lib.rs
@@ -557,7 +557,7 @@ mod tests {
// Check that the source dist is reproducible across platforms.
assert_snapshot!(
format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
- @"dab46bcc4d66960a11cfdc19604512a8e1a3241a67536f7e962166760e9c575c"
+ @"9a7f7181c5e69ac14e411a2500fed153a1e6ea41cd5da6f24f226c4cddacf6b7"
);
// Check both the files we report and the actual files
assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"
diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml
index 8014fa445..dcf61a435 100644
--- a/crates/uv-build/Cargo.toml
+++ b/crates/uv-build/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-build"
-version = "0.7.22"
+version = "0.8.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml
index 1a78d34dc..53bcbf49b 100644
--- a/crates/uv-build/pyproject.toml
+++ b/crates/uv-build/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "uv-build"
-version = "0.7.22"
+version = "0.8.0"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml
index e1a424af8..02f940b30 100644
--- a/crates/uv-version/Cargo.toml
+++ b/crates/uv-version/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv-version"
-version = "0.7.22"
+version = "0.8.0"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index ff389f033..d160cce7b 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "uv"
-version = "0.7.22"
+version = "0.8.0"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md
index d2edf1bad..d29420085 100644
--- a/docs/concepts/build-backend.md
+++ b/docs/concepts/build-backend.md
@@ -31,7 +31,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the
```toml title="pyproject.toml"
[build-system]
-requires = ["uv_build>=0.7.22,<0.8.0"]
+requires = ["uv_build>=0.8.0,<0.9.0"]
build-backend = "uv_build"
```
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 3e31a5003..5e8165824 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```console
- $ curl -LsSf https://astral.sh/uv/0.7.22/install.sh | sh
+ $ curl -LsSf https://astral.sh/uv/0.8.0/install.sh | sh
```
=== "Windows"
@@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
Request a specific version by including it in the URL:
```pwsh-session
- PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.22/install.ps1 | iex"
+ PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.8.0/install.ps1 | iex"
```
!!! tip
diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md
index 14224b3fe..d9fc06d29 100644
--- a/docs/guides/integration/aws-lambda.md
+++ b/docs/guides/integration/aws-lambda.md
@@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th
other unnecessary files.
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.22 AS uv
+FROM ghcr.io/astral-sh/uv:0.8.0 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
@@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell
Finally, we'll update the Dockerfile to include the local library in the deployment package:
```dockerfile title="Dockerfile"
-FROM ghcr.io/astral-sh/uv:0.7.22 AS uv
+FROM ghcr.io/astral-sh/uv:0.8.0 AS uv
# First, bundle the dependencies into the task root.
FROM public.ecr.aws/lambda/python:3.13 AS builder
diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md
index a75228723..0eeaed62d 100644
--- a/docs/guides/integration/docker.md
+++ b/docs/guides/integration/docker.md
@@ -31,8 +31,8 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help
The following distroless images are available:
- `ghcr.io/astral-sh/uv:latest`
-- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.22`
-- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
+- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.8.0`
+- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.8` (the latest patch
version)
And the following derived images are available:
@@ -75,7 +75,7 @@ And the following derived images are available:
As with the distroless image, each derived image is published with uv version tags as
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
-`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`.
+`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.8.0-alpine`.
In addition, starting with `0.8` each derived image also sets `UV_TOOL_BIN_DIR` to `/usr/local/bin`
to allow `uv tool install` to work as expected with the default user.
@@ -116,7 +116,7 @@ Note this requires `curl` to be available.
In either case, it is best practice to pin to a specific uv version, e.g., with:
```dockerfile
-COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/
+COPY --from=ghcr.io/astral-sh/uv:0.8.0 /uv /uvx /bin/
```
!!! tip
@@ -134,7 +134,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/
Or, with the installer:
```dockerfile
-ADD https://astral.sh/uv/0.7.22/install.sh /uv-installer.sh
+ADD https://astral.sh/uv/0.8.0/install.sh /uv-installer.sh
```
### Installing a project
@@ -560,5 +560,5 @@ Verified OK
!!! tip
These examples use `latest`, but best practice is to verify the attestation for a specific
- version tag, e.g., `ghcr.io/astral-sh/uv:0.7.22`, or (even better) the specific image digest,
+ version tag, e.g., `ghcr.io/astral-sh/uv:0.8.0`, or (even better) the specific image digest,
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md
index 956b47660..15d26b280 100644
--- a/docs/guides/integration/github.md
+++ b/docs/guides/integration/github.md
@@ -47,7 +47,7 @@ jobs:
uses: astral-sh/setup-uv@v6
with:
# Install a specific version of uv.
- version: "0.7.22"
+ version: "0.8.0"
```
## Setting up Python
diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md
index d2598fed8..bbc21ab45 100644
--- a/docs/guides/integration/pre-commit.md
+++ b/docs/guides/integration/pre-commit.md
@@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.22
+ rev: 0.8.0
hooks:
- id: uv-lock
```
@@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.22
+ rev: 0.8.0
hooks:
- id: uv-export
```
@@ -41,7 +41,7 @@ To compile requirements files:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.22
+ rev: 0.8.0
hooks:
# Compile requirements
- id: pip-compile
@@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.22
+ rev: 0.8.0
hooks:
# Compile requirements
- id: pip-compile
@@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries:
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
- rev: 0.7.22
+ rev: 0.8.0
hooks:
# Compile requirements
- id: pip-compile
diff --git a/scripts/packages/built-by-uv/pyproject.toml b/scripts/packages/built-by-uv/pyproject.toml
index f9f893485..b1914e071 100644
--- a/scripts/packages/built-by-uv/pyproject.toml
+++ b/scripts/packages/built-by-uv/pyproject.toml
@@ -24,5 +24,5 @@ data = "assets"
headers = "header"
[build-system]
-requires = ["uv_build>=0.7,<0.8"]
+requires = ["uv_build>=0.8,<0.9"]
build-backend = "uv_build"
From 1f887552f6c630c4736ec05cf834086fe95eb92f Mon Sep 17 00:00:00 2001
From: Geoffrey Thomas
Date: Thu, 17 Jul 2025 18:25:03 -0400
Subject: [PATCH 079/130] CHANGELOG: manylinux_2_28 is more like 2019 (#14696)
I must have Googled something too fast, sorry. glibc 2.28 came out
August 2018, Fedora 29 was the earliest to ship with it in October 2018,
Debian 10 shipped with it in July 2019, and CentOS 8 shipped with it in
September 2019.
---
CHANGELOG.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 351edc326..e3d8f6f17 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -78,7 +78,7 @@ This release also includes the stabilization of a couple `uv python install` fea
uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets.
- Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2017 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).
+ Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2019 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).
This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version.
From 0b23572941e271086485227f7c2b5c440062660f Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 17:26:47 -0500
Subject: [PATCH 080/130] Bump version to 0.8.0
Somehow this one was missed?
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index a079d53b2..1d0a1e713 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "uv"
-version = "0.7.22"
+version = "0.8.0"
description = "An extremely fast Python package and project manager, written in Rust."
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
From a6a5e65e0c2e6aa7d56554ab86033b7066865f51 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Thu, 17 Jul 2025 18:11:22 -0500
Subject: [PATCH 081/130] Edits to the 0.8 changelog entry (#14698)
---
CHANGELOG.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e3d8f6f17..b80747ed1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,7 +5,7 @@
## 0.8.0
-Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.5.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.
+Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.
This release also includes the stabilization of a couple `uv python install` features, which have been available under preview since late last year.
@@ -25,7 +25,7 @@ This release also includes the stabilization of a couple `uv python install` fea
See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details.
-- **Register Python versions with the Windows registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**
+- **Register Python versions with the Windows Registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**
`uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt in.
From e724ddc63f14b9378672c16433dbfba534c6cb84 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Thu, 17 Jul 2025 21:27:54 -0400
Subject: [PATCH 082/130] Allow `--config-settings-package` to apply
configuration settings at the package level (#14573)
## Summary
Closes https://github.com/astral-sh/uv/issues/14564.
Closes https://github.com/astral-sh/uv/issues/10940.
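To make the new option concrete, here is a hypothetical `pyproject.toml` sketch; the package name (`numpy`) and the `setup-args` setting are placeholders, and the nested-table shape is an assumption that the setting mirrors `config-settings`, keyed by package name. On the command line, the flag added in this diff takes `PACKAGE:KEY=VALUE` pairs, e.g., `--config-settings-package numpy:setup-args=-Dallow-noblas=true`.

```toml
# Hypothetical sketch: pass a build-backend setting only when building `numpy`,
# leaving builds of every other source distribution untouched. The package name
# and setting below are placeholders, not part of this diff.
[tool.uv.config-settings-package.numpy]
setup-args = "-Dallow-noblas=true"
```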
---
crates/uv-bench/benches/uv.rs | 6 +-
crates/uv-cli/src/lib.rs | 37 +++-
crates/uv-cli/src/options.rs | 32 ++-
.../uv-configuration/src/config_settings.rs | 184 ++++++++++++++++++
crates/uv-dispatch/src/lib.rs | 25 ++-
.../src/index/built_wheel_index.rs | 42 ++--
crates/uv-distribution/src/source/mod.rs | 51 +++--
crates/uv-installer/src/plan.rs | 11 +-
crates/uv-settings/src/combine.rs | 15 +-
crates/uv-settings/src/settings.rs | 34 +++-
crates/uv-types/src/traits.rs | 7 +-
crates/uv/src/commands/build_frontend.rs | 6 +-
crates/uv/src/commands/pip/compile.rs | 5 +-
crates/uv/src/commands/pip/install.rs | 6 +-
crates/uv/src/commands/pip/operations.rs | 4 +-
crates/uv/src/commands/pip/sync.rs | 6 +-
crates/uv/src/commands/project/add.rs | 1 +
crates/uv/src/commands/project/lock.rs | 2 +
crates/uv/src/commands/project/mod.rs | 10 +
crates/uv/src/commands/project/sync.rs | 3 +
crates/uv/src/commands/project/tree.rs | 1 +
crates/uv/src/commands/venv.rs | 4 +-
crates/uv/src/lib.rs | 3 +
crates/uv/src/settings.rs | 22 ++-
crates/uv/tests/it/pip_install.rs | 119 ++++++++++-
crates/uv/tests/it/show_settings.rs | 108 +++++++++-
crates/uv/tests/it/sync.rs | 143 ++++++++++++++
docs/reference/cli.md | 15 ++
docs/reference/settings.md | 54 +++++
uv.schema.json | 29 +++
30 files changed, 927 insertions(+), 58 deletions(-)
diff --git a/crates/uv-bench/benches/uv.rs b/crates/uv-bench/benches/uv.rs
index 9bdd7adb9..8380ccd60 100644
--- a/crates/uv-bench/benches/uv.rs
+++ b/crates/uv-bench/benches/uv.rs
@@ -86,8 +86,8 @@ mod resolver {
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::{
- BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, PreviewMode,
- SourceStrategy,
+ BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy,
+ PackageConfigSettings, PreviewMode, SourceStrategy,
};
use uv_dispatch::{BuildDispatch, SharedState};
use uv_distribution::DistributionDatabase;
@@ -144,6 +144,7 @@ mod resolver {
let build_options = BuildOptions::default();
let concurrency = Concurrency::default();
let config_settings = ConfigSettings::default();
+ let config_settings_package = PackageConfigSettings::default();
let exclude_newer = Some(
jiff::civil::date(2024, 9, 1)
.to_zoned(jiff::tz::TimeZone::UTC)
@@ -184,6 +185,7 @@ mod resolver {
state,
IndexStrategy::default(),
&config_settings,
+ &config_settings_package,
build_isolation,
LinkMode::default(),
&build_options,
diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs
index 9d7cfa6e0..d6560014f 100644
--- a/crates/uv-cli/src/lib.rs
+++ b/crates/uv-cli/src/lib.rs
@@ -10,8 +10,9 @@ use clap::{Args, Parser, Subcommand};
use uv_cache::CacheArgs;
use uv_configuration::{
- ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier,
- ProjectBuildBackend, TargetTriple, TrustedHost, TrustedPublishing, VersionControlSystem,
+ ConfigSettingEntry, ConfigSettingPackageEntry, ExportFormat, IndexStrategy,
+ KeyringProviderType, PackageNameSpecifier, ProjectBuildBackend, TargetTriple, TrustedHost,
+ TrustedPublishing, VersionControlSystem,
};
use uv_distribution_types::{Index, IndexUrl, Origin, PipExtraIndex, PipFindLinks, PipIndex};
use uv_normalize::{ExtraName, GroupName, PackageName, PipGroupName};
@@ -4693,6 +4694,14 @@ pub struct ToolUpgradeArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,
+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+ long,
+ alias = "config-settings-package",
+ help_heading = "Build options"
+ )]
+ pub config_setting_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.
@@ -5484,6 +5493,14 @@ pub struct InstallerArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,
+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+ long,
+ alias = "config-settings-package",
+ help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.
@@ -5671,6 +5688,14 @@ pub struct ResolverArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,
+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+ long,
+ alias = "config-settings-package",
+ help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.
@@ -5860,6 +5885,14 @@ pub struct ResolverInstallerArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,
+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+ long,
+ alias = "config-settings-package",
+ help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.
diff --git a/crates/uv-cli/src/options.rs b/crates/uv-cli/src/options.rs
index f522022a1..d2e651a19 100644
--- a/crates/uv-cli/src/options.rs
+++ b/crates/uv-cli/src/options.rs
@@ -1,7 +1,7 @@
use anstream::eprintln;
use uv_cache::Refresh;
-use uv_configuration::ConfigSettings;
+use uv_configuration::{ConfigSettings, PackageConfigSettings};
use uv_resolver::PrereleaseMode;
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_warnings::owo_colors::OwoColorize;
@@ -62,6 +62,7 @@ impl From<ResolverArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,
@@ -84,6 +85,11 @@ impl From<ResolverArgs> for PipOptions {
},
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
+ config_settings_package: config_settings_package.map(|config_settings| {
+ config_settings
+ .into_iter()
+ .collect::<PackageConfigSettings>()
+ }),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
@@ -104,6 +110,7 @@ impl From<InstallerArgs> for PipOptions {
index_strategy,
keyring_provider,
config_setting,
+ config_settings_package,
no_build_isolation,
build_isolation,
exclude_newer,
@@ -120,6 +127,7 @@ impl From<InstallerArgs> for PipOptions {
keyring_provider,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
+ config_settings_package: config_settings_package.map(|config_settings| {
+ config_settings
+ .into_iter()
+ .collect::<PackageConfigSettings>()
+ }),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
exclude_newer,
link_mode,
@@ -147,6 +159,7 @@ impl From<ResolverInstallerArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,
@@ -173,6 +186,11 @@ impl From<ResolverInstallerArgs> for PipOptions {
fork_strategy,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
+ config_settings_package: config_settings_package.map(|config_settings| {
+ config_settings
+ .into_iter()
+ .collect::