From b0348ee2a9dbad692ed70e8c71c046187871d000 Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Thu, 10 Jul 2025 14:19:36 -0400 Subject: [PATCH 001/130] Conditionalize version_extras test on the pypi feature (#14536) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary The `version_extras` test added in 85c0fc963b1d4f6c8130c4b4446d15b8f6ac8ac4 needs to connect to PyPI. This PR conditionalizes it on the `pypi` extra so that people running the tests offline don’t have to skip that test explicitly. ## Test Plan I already ran `cargo test` in the git checkout to confirm I didn’t somehow introduce a syntax error. I am also applying this PR as a patch to [the `uv` package in Fedora](https://src.fedoraproject.org/rpms/uv), which runs tests offline with the `pypi` feature disabled. --- crates/uv/tests/it/version.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs index ab09833f9..3c5e28e0f 100644 --- a/crates/uv/tests/it/version.rs +++ b/crates/uv/tests/it/version.rs @@ -2558,6 +2558,7 @@ fn version_set_evil_constraints() -> Result<()> { /// Bump the version with conflicting extras, to ensure we're activating the correct subset of /// extras during the resolve. #[test] +#[cfg(feature = "pypi")] fn version_extras() -> Result<()> { let context = TestContext::new("3.12"); From 43dbdba578179d8e6ced763916b658dcfb9c53c6 Mon Sep 17 00:00:00 2001 From: Noam Teyssier <22600644+noamteyssier@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:50:50 -0700 Subject: [PATCH 002/130] feature: shorthand for --with (-w) in uvx and uv tool run (#14530) ## Summary This is a small quality of life feature that adds a shorthand (`-w`) to the `--with` flag for minimizing keystrokes. Pretty minor, but I didn't see any conflicts with `-w` and thought this could be a nice place for it. 
```bash # proposed addition (short) uvx -w numpy ipython # original (long) uvx --with numpy ipython ``` ## Test Plan Added testing already in the P.R. - just copied over tests from the `--with` flag --- crates/uv-cli/src/lib.rs | 6 ++-- crates/uv/tests/it/tool_run.rs | 64 ++++++++++++++++++++++++++++++++++ docs/concepts/tools.md | 6 ++++ docs/reference/cli.md | 6 ++-- 4 files changed, 76 insertions(+), 6 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 3e9aba123..39dac54ba 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -3045,7 +3045,7 @@ pub struct RunArgs { /// When used in a project, these dependencies will be layered on top of the project environment /// in a separate, ephemeral environment. These dependencies are allowed to conflict with those /// specified by the project. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Run with the given packages installed in editable mode. @@ -4256,7 +4256,7 @@ pub struct ToolRunArgs { pub from: Option, /// Run with the given packages installed. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Run with the given packages installed in editable mode @@ -4371,7 +4371,7 @@ pub struct ToolInstallArgs { pub from: Option, /// Include the following additional requirements. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Include all requirements listed in the given `requirements.txt` files. 
diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs index 153adeb51..fb6287454 100644 --- a/crates/uv/tests/it/tool_run.rs +++ b/crates/uv/tests/it/tool_run.rs @@ -1125,6 +1125,70 @@ fn tool_run_without_output() { "###); } +#[test] +#[cfg(not(windows))] +fn tool_run_csv_with_shorthand() -> anyhow::Result<()> { + let context = TestContext::new("3.12").with_filtered_counts(); + let tool_dir = context.temp_dir.child("tools"); + let bin_dir = context.temp_dir.child("bin"); + + let anyio_local = context.temp_dir.child("src").child("anyio_local"); + copy_dir_all( + context.workspace_root.join("scripts/packages/anyio_local"), + &anyio_local, + )?; + + let black_editable = context.temp_dir.child("src").child("black_editable"); + copy_dir_all( + context + .workspace_root + .join("scripts/packages/black_editable"), + &black_editable, + )?; + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.8" + dependencies = ["anyio", "sniffio==1.3.1"] + "# + })?; + + let test_script = context.temp_dir.child("main.py"); + test_script.write_str(indoc! { r" + import sniffio + " + })?; + + // Performs a tool run with a comma-separated `--with` flag. 
+ uv_snapshot!(context.filters(), context.tool_run() + .arg("-w") + .arg("iniconfig,typing-extensions") + .arg("pytest") + .arg("--version") + .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) + .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###" + success: true + exit_code: 0 + ----- stdout ----- + pytest 8.1.1 + + ----- stderr ----- + Resolved [N] packages in [TIME] + Prepared [N] packages in [TIME] + Installed [N] packages in [TIME] + + iniconfig==2.0.0 + + packaging==24.0 + + pluggy==1.4.0 + + pytest==8.1.1 + + typing-extensions==4.10.0 + "###); + + Ok(()) +} + #[test] #[cfg(not(windows))] fn tool_run_csv_with() -> anyhow::Result<()> { diff --git a/docs/concepts/tools.md b/docs/concepts/tools.md index d5069e725..7c5eb9564 100644 --- a/docs/concepts/tools.md +++ b/docs/concepts/tools.md @@ -200,6 +200,12 @@ The `--with` option supports package specifications, so a specific version can b $ uvx --with == ``` +The `-w` shorthand can be used in place of the `--with` option: + +```console +$ uvx -w +``` + If the requested version conflicts with the requirements of the tool package, package resolution will fail and the command will error. diff --git a/docs/reference/cli.md b/docs/reference/cli.md index bd828acda..f20bcf7ec 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -246,7 +246,7 @@ used.

--upgrade-package, -P upgrade-package

Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies --refresh-package

--verbose, -v

Use verbose output.

You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

-
--with with

Run with the given packages installed.

+
--with, -w with

Run with the given packages installed.

When used in a project, these dependencies will be layered on top of the project environment in a separate, ephemeral environment. These dependencies are allowed to conflict with those specified by the project.

--with-editable with-editable

Run with the given packages installed in editable mode.

When used in a project, these dependencies will be layered on top of the project environment in a separate, ephemeral environment. These dependencies are allowed to conflict with those specified by the project.

@@ -1935,7 +1935,7 @@ uv tool run [OPTIONS] [COMMAND]
--upgrade-package, -P upgrade-package

Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies --refresh-package

--verbose, -v

Use verbose output.

You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

-
--with with

Run with the given packages installed

+
--with, -w with

Run with the given packages installed

--with-editable with-editable

Run with the given packages installed in editable mode

When used in a project, these dependencies will be layered on top of the uv tool's environment in a separate, ephemeral environment. These dependencies are allowed to conflict with those specified.

--with-requirements with-requirements

Run with all packages listed in the given requirements.txt files

@@ -2104,7 +2104,7 @@ uv tool install [OPTIONS]
--upgrade-package, -P upgrade-package

Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies --refresh-package

--verbose, -v

Use verbose output.

You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

-
--with with

Include the following additional requirements

+
--with, -w with

Include the following additional requirements

--with-editable with-editable

Include the given packages in editable mode

--with-requirements with-requirements

Include all requirements listed in the given requirements.txt files

From a3bc30c1a7f6ebf230227ea8aa0b77013ab20412 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 19:00:22 -0400 Subject: [PATCH 003/130] Use `astral-sh` fork of `rs-async-zip` (#14552) ## Summary I transferred ownership from my personal GitHub to `astral-sh`. There's no change in contents, etc. --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5861cd325..bc42e30af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -251,7 +251,7 @@ dependencies = [ [[package]] name = "async_zip" version = "0.0.17" -source = "git+https://github.com/charliermarsh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d" +source = "git+https://github.com/astral-sh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d" dependencies = [ "async-compression", "crc32fast", diff --git a/Cargo.toml b/Cargo.toml index fc19dcc9a..ecdc11701 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -80,7 +80,7 @@ async-channel = { version = "2.3.1" } async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] } async-trait = { version = "0.1.82" } async_http_range_reader = { version = "0.9.1" } -async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] } +async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] } axoupdater = { version = "0.9.0", default-features = false } backon = { version = "1.3.0" } base64 = { version = "0.22.1" } From 1b2ac40568eaa788c45752c6fc206da1130688b5 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 10 Jul 2025 18:19:45 -0500 Subject: [PATCH 004/130] Fix `if` on macos test job (#14551) --- .github/workflows/ci.yml | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4a8e8fb12..b0d8e18a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -233,7 +233,7 @@ jobs: timeout-minutes: 15 needs: determine_changes # Only run macOS tests on main without opt-in - if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos' || github.ref == 'refs/heads/main') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }} runs-on: macos-latest-xlarge # github-macos-14-aarch64-6 name: "cargo test | macos" steps: From 0fb8c2b1d70820f24675b863570574f6b26aa1cb Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 21:38:28 -0400 Subject: [PATCH 005/130] Add `--python-platform` to `uv sync` (#14320) ## Summary Closes https://github.com/astral-sh/uv/issues/14273. --- crates/uv-cli/src/lib.rs | 17 +++++++++ crates/uv/src/commands/project/add.rs | 1 + crates/uv/src/commands/project/remove.rs | 1 + crates/uv/src/commands/project/run.rs | 2 + crates/uv/src/commands/project/sync.rs | 22 ++++++----- crates/uv/src/commands/project/version.rs | 1 + crates/uv/src/lib.rs | 1 + crates/uv/src/settings.rs | 3 ++ crates/uv/tests/it/sync.rs | 39 +++++++++++++++++++ docs/reference/cli.md | 46 ++++++++++++++++++++++- 10 files changed, 123 insertions(+), 10 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 39dac54ba..056447959 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -3439,6 +3439,23 @@ pub struct SyncArgs { )] pub python: Option>, + /// The platform for which requirements should be installed. + /// + /// Represented as a "target triple", a string that describes the target platform in terms of + /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or + /// `aarch64-apple-darwin`. + /// + /// When targeting macOS (Darwin), the default minimum version is `12.0`. 
Use + /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`. + /// + /// WARNING: When specified, uv will select wheels that are compatible with the _target_ + /// platform; as a result, the installed distributions may not be compatible with the _current_ + /// platform. Conversely, any distributions that are built from source may be incompatible with + /// the _target_ platform, as they will be built for the _current_ platform. The + /// `--python-platform` option is intended for advanced use cases. + #[arg(long)] + pub python_platform: Option, + /// Check if the Python environment is synchronized with the project. /// /// If the environment is not up to date, uv will exit with an error. diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index 959241b4b..f255194de 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -1080,6 +1080,7 @@ async fn lock_and_sync( EditableMode::Editable, InstallOptions::default(), Modifications::Sufficient, + None, settings.into(), network_settings, &sync_state, diff --git a/crates/uv/src/commands/project/remove.rs b/crates/uv/src/commands/project/remove.rs index 6bc04160e..50615699e 100644 --- a/crates/uv/src/commands/project/remove.rs +++ b/crates/uv/src/commands/project/remove.rs @@ -357,6 +357,7 @@ pub(crate) async fn remove( EditableMode::Editable, InstallOptions::default(), Modifications::Exact, + None, (&settings).into(), &network_settings, &state, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index a4fd4ae7d..f0a46f16a 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -305,6 +305,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl editable, install_options, modifications, + None, (&settings).into(), &network_settings, &sync_state, @@ -816,6 +817,7 @@ hint: If you are running a script with 
`{}` in the shebang, you may need to incl editable, install_options, modifications, + None, (&settings).into(), &network_settings, &sync_state, diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 6e057446e..664eb2a94 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -13,7 +13,7 @@ use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode, ExtrasSpecification, ExtrasSpecificationWithDefaults, HashCheckingMode, InstallOptions, - PreviewMode, + PreviewMode, TargetTriple, }; use uv_dispatch::BuildDispatch; use uv_distribution_types::{ @@ -34,8 +34,9 @@ use uv_workspace::pyproject::Source; use uv_workspace::{DiscoveryOptions, MemberDiscovery, VirtualProject, Workspace, WorkspaceCache}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger, InstallLogger}; -use crate::commands::pip::operations; use crate::commands::pip::operations::Modifications; +use crate::commands::pip::resolution_markers; +use crate::commands::pip::{operations, resolution_tags}; use crate::commands::project::install_target::InstallTarget; use crate::commands::project::lock::{LockMode, LockOperation, LockResult}; use crate::commands::project::lock_target::LockTarget; @@ -63,6 +64,7 @@ pub(crate) async fn sync( install_options: InstallOptions, modifications: Modifications, python: Option, + python_platform: Option, install_mirrors: PythonInstallMirrors, python_preference: PythonPreference, python_downloads: PythonDownloads, @@ -453,6 +455,7 @@ pub(crate) async fn sync( editable, install_options, modifications, + python_platform.as_ref(), (&settings).into(), &network_settings, &state, @@ -589,6 +592,7 @@ pub(super) async fn do_sync( editable: EditableMode, install_options: InstallOptions, modifications: Modifications, + python_platform: 
Option<&TargetTriple>, settings: InstallerSettingsRef<'_>, network_settings: &NetworkSettings, state: &PlatformState, @@ -644,7 +648,7 @@ pub(super) async fn do_sync( target.validate_groups(groups)?; // Determine the markers to use for resolution. - let marker_env = venv.interpreter().resolver_marker_environment(); + let marker_env = resolution_markers(None, python_platform, venv.interpreter()); // Validate that the platform is supported by the lockfile. let environments = target.lock().supported_environments(); @@ -670,13 +674,13 @@ pub(super) async fn do_sync( } } - // Determine the tags to use for resolution. - let tags = venv.interpreter().tags()?; + // Determine the tags to use for the resolution. + let tags = resolution_tags(None, python_platform, venv.interpreter())?; // Read the lockfile. let resolution = target.to_resolution( &marker_env, - tags, + &tags, extras, groups, build_options, @@ -728,7 +732,7 @@ pub(super) async fn do_sync( let entries = client .fetch_all(index_locations.flat_indexes().map(Index::url)) .await?; - FlatIndex::from_entries(entries, Some(tags), &hasher, build_options) + FlatIndex::from_entries(entries, Some(&tags), &hasher, build_options) }; // Create a build dispatch. @@ -768,7 +772,7 @@ pub(super) async fn do_sync( index_locations, config_setting, &hasher, - tags, + &tags, &client, state.in_flight(), concurrency, @@ -847,7 +851,7 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu /// These credentials can come from any of `tool.uv.sources`, `tool.uv.dev-dependencies`, /// `project.dependencies`, and `project.optional-dependencies`. fn store_credentials_from_target(target: InstallTarget<'_>) { - // Iterate over any idnexes in the target. + // Iterate over any indexes in the target. 
for index in target.indexes() { if let Some(credentials) = index.credentials() { let credentials = Arc::new(credentials); diff --git a/crates/uv/src/commands/project/version.rs b/crates/uv/src/commands/project/version.rs index ec278d4b4..ed1e9e246 100644 --- a/crates/uv/src/commands/project/version.rs +++ b/crates/uv/src/commands/project/version.rs @@ -634,6 +634,7 @@ async fn lock_and_sync( EditableMode::Editable, install_options, Modifications::Sufficient, + None, settings.into(), &network_settings, &state, diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 28a20f373..261dd8d7c 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1802,6 +1802,7 @@ async fn run_project( args.install_options, args.modifications, args.python, + args.python_platform, args.install_mirrors, globals.python_preference, globals.python_downloads, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index ed86608ed..f89704d45 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -1150,6 +1150,7 @@ pub(crate) struct SyncSettings { pub(crate) all_packages: bool, pub(crate) package: Option, pub(crate) python: Option, + pub(crate) python_platform: Option, pub(crate) install_mirrors: PythonInstallMirrors, pub(crate) refresh: Refresh, pub(crate) settings: ResolverInstallerSettings, @@ -1190,6 +1191,7 @@ impl SyncSettings { package, script, python, + python_platform, check, no_check, } = args; @@ -1249,6 +1251,7 @@ impl SyncSettings { all_packages, package, python: python.and_then(Maybe::into_option), + python_platform, refresh: Refresh::from(refresh), settings, install_mirrors, diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 690079abf..d4479296a 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -10026,3 +10026,42 @@ fn read_only() -> Result<()> { Ok(()) } + +#[test] +fn sync_python_platform() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = 
context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["black"] + "#, + )?; + + // Lock the project + context.lock().assert().success(); + + // Sync with a specific platform should filter packages + uv_snapshot!(context.filters(), context.sync().arg("--python-platform").arg("linux"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 8 packages in [TIME] + Prepared 6 packages in [TIME] + Installed 6 packages in [TIME] + + black==24.3.0 + + click==8.1.7 + + mypy-extensions==1.0.0 + + packaging==24.0 + + pathspec==0.12.1 + + platformdirs==4.2.0 + "); + + Ok(()) +} diff --git a/docs/reference/cli.md b/docs/reference/cli.md index f20bcf7ec..989cbc54b 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -1138,7 +1138,51 @@ used.

synced to the given environment. The interpreter will be used to create a virtual environment in the project.

See uv python for details on Python discovery and supported request formats.

-

May also be set with the UV_PYTHON environment variable.

--quiet, -q

Use quiet output.

+

May also be set with the UV_PYTHON environment variable.

--python-platform python-platform

The platform for which requirements should be installed.

+

Represented as a "target triple", a string that describes the target platform in terms of its CPU, vendor, and operating system name, like x86_64-unknown-linux-gnu or aarch64-apple-darwin.

+

When targeting macOS (Darwin), the default minimum version is 12.0. Use MACOSX_DEPLOYMENT_TARGET to specify a different minimum version, e.g., 13.0.

+

WARNING: When specified, uv will select wheels that are compatible with the target platform; as a result, the installed distributions may not be compatible with the current platform. Conversely, any distributions that are built from source may be incompatible with the target platform, as they will be built for the current platform. The --python-platform option is intended for advanced use cases.

+

Possible values:

+
    +
  • windows: An alias for x86_64-pc-windows-msvc, the default target for Windows
  • +
  • linux: An alias for x86_64-unknown-linux-gnu, the default target for Linux
  • +
  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • +
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • +
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • +
  • x86_64-apple-darwin: An x86 macOS target
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • +
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • +
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-manylinux_2_17: An x86_64 target for the manylinux_2_17 platform
  • +
  • x86_64-manylinux_2_28: An x86_64 target for the manylinux_2_28 platform
  • +
  • x86_64-manylinux_2_31: An x86_64 target for the manylinux_2_31 platform
  • +
  • x86_64-manylinux_2_32: An x86_64 target for the manylinux_2_32 platform
  • +
  • x86_64-manylinux_2_33: An x86_64 target for the manylinux_2_33 platform
  • +
  • x86_64-manylinux_2_34: An x86_64 target for the manylinux_2_34 platform
  • +
  • x86_64-manylinux_2_35: An x86_64 target for the manylinux_2_35 platform
  • +
  • x86_64-manylinux_2_36: An x86_64 target for the manylinux_2_36 platform
  • +
  • x86_64-manylinux_2_37: An x86_64 target for the manylinux_2_37 platform
  • +
  • x86_64-manylinux_2_38: An x86_64 target for the manylinux_2_38 platform
  • +
  • x86_64-manylinux_2_39: An x86_64 target for the manylinux_2_39 platform
  • +
  • x86_64-manylinux_2_40: An x86_64 target for the manylinux_2_40 platform
  • +
  • aarch64-manylinux2014: An ARM64 target for the manylinux2014 platform. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-manylinux_2_17: An ARM64 target for the manylinux_2_17 platform
  • +
  • aarch64-manylinux_2_28: An ARM64 target for the manylinux_2_28 platform
  • +
  • aarch64-manylinux_2_31: An ARM64 target for the manylinux_2_31 platform
  • +
  • aarch64-manylinux_2_32: An ARM64 target for the manylinux_2_32 platform
  • +
  • aarch64-manylinux_2_33: An ARM64 target for the manylinux_2_33 platform
  • +
  • aarch64-manylinux_2_34: An ARM64 target for the manylinux_2_34 platform
  • +
  • aarch64-manylinux_2_35: An ARM64 target for the manylinux_2_35 platform
  • +
  • aarch64-manylinux_2_36: An ARM64 target for the manylinux_2_36 platform
  • +
  • aarch64-manylinux_2_37: An ARM64 target for the manylinux_2_37 platform
  • +
  • aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • +
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • +
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • +
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
--quiet, -q

Use quiet output.

Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.

--refresh

Refresh all cached data

--refresh-package refresh-package

Refresh cached data for a specific package

From cc5d5d5fba546afca77788ced6ae2f2488b90f17 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 21:41:32 -0400 Subject: [PATCH 006/130] Fix repeated word in Pyodide doc (#14554) --- crates/uv-configuration/src/target_triple.rs | 2 +- docs/reference/cli.md | 8 ++++---- uv.schema.json | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/uv-configuration/src/target_triple.rs b/crates/uv-configuration/src/target_triple.rs index b9ca3fafe..81499deff 100644 --- a/crates/uv-configuration/src/target_triple.rs +++ b/crates/uv-configuration/src/target_triple.rs @@ -227,7 +227,7 @@ pub enum TargetTriple { #[serde(alias = "aarch64-manylinux240")] Aarch64Manylinux240, - /// A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12. + /// A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12. #[cfg_attr(feature = "clap", value(name = "wasm32-pyodide2024"))] Wasm32Pyodide2024, } diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 989cbc54b..f8247d5aa 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -1763,7 +1763,7 @@ interpreter. Use --universal to display the tree for all platforms,
  • aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • -
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
  • wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
  • --python-version python-version

    The Python version to use when filtering the tree.

    For example, pass --python-version 3.10 to display the dependencies that would be included when installing on Python 3.10.

    Defaults to the version of the discovered Python interpreter.

    @@ -3448,7 +3448,7 @@ by --python-version.

  • aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • -
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
  • wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
  • --python-version python-version

    The Python version to use for resolution.

    For example, 3.8 or 3.8.17.

    Defaults to the version of the Python interpreter used for resolution.

    @@ -3705,7 +3705,7 @@ be used with caution, as it can modify the system Python installation.

  • aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • -
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
  • wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
  • --python-version python-version

    The minimum Python version that should be supported by the requirements (e.g., 3.7 or 3.7.9).

    If a patch version is omitted, the minimum patch version is assumed. For example, 3.7 is mapped to 3.7.0.

    --quiet, -q

    Use quiet output.

    @@ -3987,7 +3987,7 @@ should be used with caution, as it can modify the system Python installation.

    aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • -
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
  • wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
  • --python-version python-version

    The minimum Python version that should be supported by the requirements (e.g., 3.7 or 3.7.9).

    If a patch version is omitted, the minimum patch version is assumed. For example, 3.7 is mapped to 3.7.0.

    --quiet, -q

    Use quiet output.

    diff --git a/uv.schema.json b/uv.schema.json index aba25a46e..4190672e9 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -2242,7 +2242,7 @@ "const": "aarch64-manylinux_2_40" }, { - "description": "A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12.", + "description": "A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.", "type": "string", "const": "wasm32-pyodide2024" } From 2e0f399eeb57e06dadf6e4d652d31329ada64e85 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 21:46:44 -0400 Subject: [PATCH 007/130] Run `cargo dev generate-all` (#14555) ## Summary I think we had a missing rebase. --- docs/reference/cli.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/cli.md b/docs/reference/cli.md index f8247d5aa..0364703c2 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -1181,7 +1181,7 @@ environment in the project.

  • aarch64-manylinux_2_38: An ARM64 target for the manylinux_2_38 platform
  • aarch64-manylinux_2_39: An ARM64 target for the manylinux_2_39 platform
  • aarch64-manylinux_2_40: An ARM64 target for the manylinux_2_40 platform
  • -
  • wasm32-pyodide2024: A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12
  • +
  • wasm32-pyodide2024: A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12
  • --quiet, -q

    Use quiet output.

    Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.

    --refresh

    Refresh all cached data

    From 71470b7b1ae41867846822716b11d66f23ecdabd Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 11 Jul 2025 07:35:27 -0500 Subject: [PATCH 008/130] Add `UV_HTTP_RETRIES` to customize retry counts (#14544) I want to increase this number in CI and was surprised we didn't support configuration yet. --- crates/uv-client/src/base_client.rs | 26 +++++++++- crates/uv-client/src/registry_client.rs | 5 ++ crates/uv-requirements/src/lib.rs | 3 ++ crates/uv-static/src/env_vars.rs | 6 +++ crates/uv/src/commands/build_frontend.rs | 1 + crates/uv/src/commands/pip/compile.rs | 1 + crates/uv/src/commands/pip/install.rs | 1 + crates/uv/src/commands/pip/list.rs | 1 + crates/uv/src/commands/pip/sync.rs | 1 + crates/uv/src/commands/pip/tree.rs | 1 + crates/uv/src/commands/pip/uninstall.rs | 1 + crates/uv/src/commands/project/add.rs | 2 + crates/uv/src/commands/project/init.rs | 2 + crates/uv/src/commands/project/lock.rs | 2 + crates/uv/src/commands/project/mod.rs | 7 +++ crates/uv/src/commands/project/run.rs | 4 ++ crates/uv/src/commands/project/sync.rs | 1 + crates/uv/src/commands/project/tree.rs | 1 + crates/uv/src/commands/publish.rs | 1 + crates/uv/src/commands/python/install.rs | 1 + crates/uv/src/commands/python/pin.rs | 1 + crates/uv/src/commands/tool/install.rs | 2 + crates/uv/src/commands/tool/run.rs | 1 + crates/uv/src/commands/tool/upgrade.rs | 1 + crates/uv/src/commands/venv.rs | 3 ++ crates/uv/tests/it/pip_install.rs | 60 ++++++++++++++++++++++++ docs/reference/environment.md | 8 ++++ 27 files changed, 143 insertions(+), 1 deletion(-) diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs index e11845adb..9ddc30e75 100644 --- a/crates/uv-client/src/base_client.rs +++ b/crates/uv-client/src/base_client.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use std::time::Duration; use std::{env, io, iter}; +use anyhow::Context; use anyhow::anyhow; use http::{ HeaderMap, HeaderName, HeaderValue, Method, StatusCode, @@ -166,6 +167,25 @@ impl<'a> 
BaseClientBuilder<'a> { self } + /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change. + /// + /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32. + pub fn retries_from_env(self) -> anyhow::Result { + // TODO(zanieb): We should probably parse this in another layer, but there's not a natural + // fit for it right now + if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) { + Ok(self.retries( + value + .to_string_lossy() + .as_ref() + .parse::() + .context("Failed to parse `UV_HTTP_RETRIES`")?, + )) + } else { + Ok(self) + } + } + #[must_use] pub fn native_tls(mut self, native_tls: bool) -> Self { self.native_tls = native_tls; @@ -238,7 +258,11 @@ impl<'a> BaseClientBuilder<'a> { /// Create a [`RetryPolicy`] for the client. fn retry_policy(&self) -> ExponentialBackoff { - ExponentialBackoff::builder().build_with_max_retries(self.retries) + let mut builder = ExponentialBackoff::builder(); + if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() { + builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0)); + } + builder.build_with_max_retries(self.retries) } pub fn build(&self) -> BaseClient { diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs index c7694676c..afa1b03ae 100644 --- a/crates/uv-client/src/registry_client.rs +++ b/crates/uv-client/src/registry_client.rs @@ -115,6 +115,11 @@ impl<'a> RegistryClientBuilder<'a> { self } + pub fn retries_from_env(mut self) -> anyhow::Result { + self.base_client_builder = self.base_client_builder.retries_from_env()?; + Ok(self) + } + #[must_use] pub fn native_tls(mut self, native_tls: bool) -> Self { self.base_client_builder = self.base_client_builder.native_tls(native_tls); diff --git a/crates/uv-requirements/src/lib.rs b/crates/uv-requirements/src/lib.rs index 812f9141f..68fe84abc 100644 --- a/crates/uv-requirements/src/lib.rs +++ b/crates/uv-requirements/src/lib.rs @@ -31,6 +31,9 @@ pub 
enum Error { #[error(transparent)] WheelFilename(#[from] uv_distribution_filename::WheelFilenameError), + #[error("Failed to construct HTTP client")] + ClientError(#[source] anyhow::Error), + #[error(transparent)] Io(#[from] std::io::Error), } diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index 4ac2976d9..5b91fccea 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -402,6 +402,9 @@ impl EnvVars { /// Timeout (in seconds) for HTTP requests. (default: 30 s) pub const UV_HTTP_TIMEOUT: &'static str = "UV_HTTP_TIMEOUT"; + /// The number of retries for HTTP requests. (default: 3) + pub const UV_HTTP_RETRIES: &'static str = "UV_HTTP_RETRIES"; + /// Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. pub const UV_REQUEST_TIMEOUT: &'static str = "UV_REQUEST_TIMEOUT"; @@ -659,6 +662,9 @@ impl EnvVars { #[attr_hidden] pub const UV_TEST_VENDOR_LINKS_URL: &'static str = "UV_TEST_VENDOR_LINKS_URL"; + /// Used to disable delay for HTTP retries in tests. + pub const UV_TEST_NO_HTTP_RETRY_DELAY: &'static str = "UV_TEST_NO_HTTP_RETRY_DELAY"; + /// Used to set an index url for tests. #[attr_hidden] pub const UV_TEST_INDEX_URL: &'static str = "UV_TEST_INDEX_URL"; diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs index 2cef9a406..fd6ed73d7 100644 --- a/crates/uv/src/commands/build_frontend.rs +++ b/crates/uv/src/commands/build_frontend.rs @@ -207,6 +207,7 @@ async fn build_impl( } = settings; let client_builder = BaseClientBuilder::default() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index a1846d418..c40716763 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -179,6 +179,7 @@ pub(crate) async fn pip_compile( } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index aa6e6a6c9..bbfe99c50 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -99,6 +99,7 @@ pub(crate) async fn pip_install( let start = std::time::Instant::now(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/list.rs b/crates/uv/src/commands/pip/list.rs index 356574436..40e8c770d 100644 --- a/crates/uv/src/commands/pip/list.rs +++ b/crates/uv/src/commands/pip/list.rs @@ -87,6 +87,7 @@ pub(crate) async fn pip_list( let capabilities = IndexCapabilities::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 8f26aaea2..6858ddad0 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -81,6 +81,7 @@ pub(crate) async fn pip_sync( preview: PreviewMode, ) -> Result { let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/tree.rs b/crates/uv/src/commands/pip/tree.rs index b0ba44c35..81a566b8e 100644 --- a/crates/uv/src/commands/pip/tree.rs +++ b/crates/uv/src/commands/pip/tree.rs @@ -86,6 +86,7 @@ pub(crate) async fn pip_tree( let capabilities = IndexCapabilities::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/uninstall.rs b/crates/uv/src/commands/pip/uninstall.rs index 835e7de65..f617a0203 100644 --- a/crates/uv/src/commands/pip/uninstall.rs +++ b/crates/uv/src/commands/pip/uninstall.rs @@ -42,6 +42,7 @@ pub(crate) async fn pip_uninstall( let start = std::time::Instant::now(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index f255194de..d65866483 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -176,6 +176,7 @@ pub(crate) async fn add( } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -329,6 +330,7 @@ pub(crate) async fn add( .ok(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(settings.resolver.keyring_provider) diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index 15fed409e..9ff321a72 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -218,6 +218,7 @@ async fn init_script( warn_user_once!("`--package` is a no-op for Python scripts, which are standalone"); } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -348,6 +349,7 @@ async fn init_project( let reporter = PythonDownloadReporter::single(printer); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 9cbd43ea9..f79557d9e 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -99,6 +99,7 @@ pub(crate) async fn lock( let script = match script { Some(ScriptPath::Path(path)) => { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -588,6 +589,7 @@ async fn do_lock( // Initialize the client. let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index a012e2855..1a0274cac 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -690,6 +690,7 @@ impl ScriptInterpreter { } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -946,6 +947,7 @@ impl ProjectInterpreter { } let client_builder = BaseClientBuilder::default() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -1656,6 +1658,8 @@ pub(crate) async fn resolve_names( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env() + .map_err(uv_requirements::Error::ClientError)? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) @@ -1813,6 +1817,7 @@ pub(crate) async fn resolve_environment( } = spec.requirements; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) @@ -1984,6 +1989,7 @@ pub(crate) async fn sync_environment( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -2147,6 +2153,7 @@ pub(crate) async fn update_environment( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index f0a46f16a..3eece5432 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -618,6 +618,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl // If we're isolating the environment, use an ephemeral virtual environment as the // base environment for the project. let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -859,6 +860,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl let interpreter = { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -929,6 +931,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl None } else { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -1526,6 +1529,7 @@ impl RunCommand { .tempfile()?; let client = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()) diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 664eb2a94..a9a161527 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -623,6 +623,7 @@ pub(super) async fn do_sync( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index d401940d9..cd1339d3e 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -215,6 +215,7 @@ pub(crate) async fn tree( let client = RegistryClientBuilder::new( cache.clone().with_refresh(Refresh::All(Timestamp::now())), ) + .retries_from_env()? .native_tls(network_settings.native_tls) .connectivity(network_settings.connectivity) .allow_insecure_host(network_settings.allow_insecure_host.clone()) diff --git a/crates/uv/src/commands/publish.rs b/crates/uv/src/commands/publish.rs index 63a0f2756..e7f5e00a2 100644 --- a/crates/uv/src/commands/publish.rs +++ b/crates/uv/src/commands/publish.rs @@ -95,6 +95,7 @@ pub(crate) async fn publish( false, ); let registry_client_builder = RegistryClientBuilder::new(cache.clone()) + .retries_from_env()? 
.native_tls(network_settings.native_tls) .connectivity(network_settings.connectivity) .allow_insecure_host(network_settings.allow_insecure_host.clone()) diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index 3df0cf91d..8c8387d07 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -376,6 +376,7 @@ pub(crate) async fn install( // Download and unpack the Python versions concurrently let client = uv_client::BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()) diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs index 395981751..f4d10cdfa 100644 --- a/crates/uv/src/commands/python/pin.rs +++ b/crates/uv/src/commands/python/pin.rs @@ -107,6 +107,7 @@ pub(crate) async fn pin( } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs index 5ced211b3..27f18abe4 100644 --- a/crates/uv/src/commands/tool/install.rs +++ b/crates/uv/src/commands/tool/install.rs @@ -66,6 +66,7 @@ pub(crate) async fn install( preview: PreviewMode, ) -> Result { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -97,6 +98,7 @@ pub(crate) async fn install( let workspace_cache = WorkspaceCache::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 2746d65ad..c8297243d 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -690,6 +690,7 @@ async fn get_or_create_environment( preview: PreviewMode, ) -> Result<(ToolRequirement, PythonEnvironment), ProjectError> { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/tool/upgrade.rs b/crates/uv/src/commands/tool/upgrade.rs index 95b7d1e2d..9d2d32a21 100644 --- a/crates/uv/src/commands/tool/upgrade.rs +++ b/crates/uv/src/commands/tool/upgrade.rs @@ -80,6 +80,7 @@ pub(crate) async fn upgrade( let reporter = PythonDownloadReporter::single(printer); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index 9334d844d..6d6e15758 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -193,6 +193,9 @@ async fn venv_impl( .unwrap_or(PathBuf::from(".venv")), ); + // TODO(zanieb): We don't use [`BaseClientBuilder::retries_from_env`] here because it's a pain + // to map into a miette diagnostic. We should just remove miette diagnostics here, we're not + // using them elsewhere. 
let client_builder = BaseClientBuilder::default() .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index f231198e4..f142beefa 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -499,6 +499,66 @@ fn install_package() { context.assert_command("import flask").success(); } +#[tokio::test] +async fn install_http_retries() { + let context = TestContext::new("3.12"); + + let server = MockServer::start().await; + + // Create a server that always fails, so we can see the number of retries used + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(503)) + .mount(&server) + .await; + + uv_snapshot!(context.filters(), context.pip_install() + .arg("anyio") + .arg("--index") + .arg(server.uri()) + .env(EnvVars::UV_HTTP_RETRIES, "foo"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to parse `UV_HTTP_RETRIES` + Caused by: invalid digit found in string + " + ); + + uv_snapshot!(context.filters(), context.pip_install() + .arg("anyio") + .arg("--index") + .arg(server.uri()) + .env(EnvVars::UV_HTTP_RETRIES, "999999999999"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to parse `UV_HTTP_RETRIES` + Caused by: number too large to fit in target type + " + ); + + uv_snapshot!(context.filters(), context.pip_install() + .arg("anyio") + .arg("--index") + .arg(server.uri()) + .env(EnvVars::UV_HTTP_RETRIES, "5") + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Request failed after 5 retries + Caused by: Failed to fetch: `http://[LOCALHOST]/anyio/` + Caused by: HTTP status server error (503 Service Unavailable) for url (http://[LOCALHOST]/anyio/) + " + ); +} + /// Install a package from a `requirements.txt` into a virtual environment. 
#[test] fn install_requirements_txt() -> Result<()> { diff --git a/docs/reference/environment.md b/docs/reference/environment.md index 61889ddb3..bf8bf29ec 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -102,6 +102,10 @@ Equivalent to the `--token` argument for self update. A GitHub token for authent Enables fetching files stored in Git LFS when installing a package from a Git repository. +### `UV_HTTP_RETRIES` + +The number of retries for HTTP requests. (default: 3) + ### `UV_HTTP_TIMEOUT` Timeout (in seconds) for HTTP requests. (default: 30 s) @@ -416,6 +420,10 @@ WARNING: `UV_SYSTEM_PYTHON=true` is intended for use in continuous integration ( or containerized environments and should be used with caution, as modifying the system Python can lead to unexpected behavior. +### `UV_TEST_NO_HTTP_RETRY_DELAY` + +Used to disable delay for HTTP retries in tests. + ### `UV_TOOL_BIN_DIR` Specifies the "bin" directory for installing tool executables. From 567468ce72a3c7c1644cb061f9ed0d396118b2fb Mon Sep 17 00:00:00 2001 From: Ivan Smirnov Date: Fri, 11 Jul 2025 16:01:54 +0100 Subject: [PATCH 009/130] More efficient cache-key globbing + support parent paths in globs (#13469) ## Summary (Related PR: #13438 - would be nice to have it merged as well since it touches on the same globwalker code) There's a few issues with `cache-key` globs, which this PR attempts to address: - As of the current state, parent or absolute paths are not allowed, which is not obvious and is not documented. E.g., cache-key paths of the form `{file = "../dep/**"}` will be essentially ignored. - Absolute glob patterns also don't work (funnily enough, there's logic in `globwalk` itself that attempts to address it in [`globwalk::glob_builder()`](https://github.com/Gilnaa/globwalk/blob/8973fa2bc560be54c91448131238fa50d56ee121/src/lib.rs#L415), which serves as inspiration to some parts of this PR). 
- The reason for parent paths being ignored is the way globwalker is currently being triggered in `uv-cache-info`: the base directory is being walked over completely and each entry is then being matched to one of the provided match patterns. - This may also end up being very inefficient if you have a huge root folder with thousands of files: if your match patterns are `a/b/*.rs` and `a/c/*.py` then instead of walking over the root directory, you can just walk over `a/b` and `a/c` and match the relevant patterns there. - Why supporting parent paths may be important to the point of being a blocker: in large codebases with python projects depending on other local non-python projects (e.g. rust crates), cache-keys can be very useful to track dependency on the source code of the latter (e.g. `cache-keys = [{ file = "../../crates/some-dep/**" }]`. - TLDR: parent/absolute cache-key globs don't work, glob walk can be slow. ## Solution - In this PR, user-provided glob patterns are first clustered (LCP-style) into pattern groups with longest common path prefix; each of these groups can then be walked over separately. - Pattern groups do not overlap, so we would never walk over the same directory twice (unless there's symlinks pointing to same folders). - Paths are not canonicalized nor virtually normalized (which is impossible on Unix without FS access), so the method is symlink-safe (i.e. we don't treat `a/b/..` as `a`) and should work fine with #13438. - Because of LCP logic, the minimal amount of directory space will be traversed to cover all patterns. - Absolute glob patterns will now work. - Parent-relative glob patterns will now work. - Glob walking will be more efficient in some cases. ## Possible improvements - Efficiency can be further greatly improved if we limit max depth for globwalk. Currently, a simple ".toml" will deep-traverse the whole folder. Essentially, max depth can be always set to either N or infinity. 
If a pattern at a pivot node contains `**`, we collect all children nodes from the subtree into the same group and don't limit max depth; otherwise, we set max depth to the length of the glob pattern. This wouldn't change correctness though and can be done separately if needed. - If this is considered important enough, docs can be updated to indicate that parent and absolute globs are supported (and symlinks are resolved, if the relevant PR is also merged in). ## Test Plan - Glob splitting and clustering tests are included in the PR. - Relative and absolute glob cache-keys were tested in an actual codebase. --- crates/uv-cache-info/src/cache_info.rs | 50 ++-- crates/uv-cache-info/src/glob.rs | 318 +++++++++++++++++++++++++ crates/uv-cache-info/src/lib.rs | 1 + 3 files changed, 347 insertions(+), 22 deletions(-) create mode 100644 crates/uv-cache-info/src/glob.rs diff --git a/crates/uv-cache-info/src/cache_info.rs b/crates/uv-cache-info/src/cache_info.rs index ce98cc513..27a98ab54 100644 --- a/crates/uv-cache-info/src/cache_info.rs +++ b/crates/uv-cache-info/src/cache_info.rs @@ -7,6 +7,7 @@ use serde::Deserialize; use tracing::{debug, warn}; use crate::git_info::{Commit, Tags}; +use crate::glob::cluster_globs; use crate::timestamp::Timestamp; #[derive(Debug, thiserror::Error)] @@ -212,34 +213,39 @@ impl CacheInfo { } } - // If we have any globs, process them in a single pass. + // If we have any globs, first cluster them using LCP and then do a single pass on each group.
if !globs.is_empty() { - let walker = globwalk::GlobWalkerBuilder::from_patterns(directory, &globs) + for (glob_base, glob_patterns) in cluster_globs(&globs) { + let walker = globwalk::GlobWalkerBuilder::from_patterns( + directory.join(glob_base), + &glob_patterns, + ) .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK) .build()?; - for entry in walker { - let entry = match entry { - Ok(entry) => entry, - Err(err) => { - warn!("Failed to read glob entry: {err}"); + for entry in walker { + let entry = match entry { + Ok(entry) => entry, + Err(err) => { + warn!("Failed to read glob entry: {err}"); + continue; + } + }; + let metadata = match entry.metadata() { + Ok(metadata) => metadata, + Err(err) => { + warn!("Failed to read metadata for glob entry: {err}"); + continue; + } + }; + if !metadata.is_file() { + warn!( + "Expected file for cache key, but found directory: `{}`", + entry.path().display() + ); continue; } - }; - let metadata = match entry.metadata() { - Ok(metadata) => metadata, - Err(err) => { - warn!("Failed to read metadata for glob entry: {err}"); - continue; - } - }; - if !metadata.is_file() { - warn!( - "Expected file for cache key, but found directory: `{}`", - entry.path().display() - ); - continue; + timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata))); } - timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata))); } } diff --git a/crates/uv-cache-info/src/glob.rs b/crates/uv-cache-info/src/glob.rs new file mode 100644 index 000000000..e9c85897f --- /dev/null +++ b/crates/uv-cache-info/src/glob.rs @@ -0,0 +1,318 @@ +use std::{ + collections::BTreeMap, + path::{Component, Components, Path, PathBuf}, +}; + +/// Check if a component of the path looks like it may be a glob pattern. 
+/// +/// Note: this function is being used when splitting a glob pattern into a long possible +/// base and the glob remainder (scanning through components until we hit the first component +/// for which this function returns true). It is acceptable for this function to return +/// false positives (e.g. patterns like 'foo[bar' or 'foo{bar') in which case correctness +/// will not be affected but efficiency might be (because we'll traverse more than we should), +/// however it should not return false negatives. +fn is_glob_like(part: Component) -> bool { + matches!(part, Component::Normal(_)) + && part.as_os_str().to_str().is_some_and(|part| { + ["*", "{", "}", "?", "[", "]"] + .into_iter() + .any(|c| part.contains(c)) + }) +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +struct GlobParts { + base: PathBuf, + pattern: PathBuf, +} + +/// Split a glob into longest possible base + shortest possible glob pattern. +fn split_glob(pattern: impl AsRef) -> GlobParts { + let pattern: &Path = pattern.as_ref().as_ref(); + + let mut glob = GlobParts::default(); + let mut globbing = false; + let mut last = None; + + for part in pattern.components() { + if let Some(last) = last { + if last != Component::CurDir { + if globbing { + glob.pattern.push(last); + } else { + glob.base.push(last); + } + } + } + if !globbing { + globbing = is_glob_like(part); + } + // we don't know if this part is the last one, defer handling it by one iteration + last = Some(part); + } + + if let Some(last) = last { + // defer handling the last component to prevent draining entire pattern into base + if globbing || matches!(last, Component::Normal(_)) { + glob.pattern.push(last); + } else { + glob.base.push(last); + } + } + glob +} + +/// Classic trie with edges being path components and values being glob patterns. 
+#[derive(Default)] +struct Trie<'a> { + children: BTreeMap, Trie<'a>>, + patterns: Vec<&'a Path>, +} + +impl<'a> Trie<'a> { + fn insert(&mut self, mut components: Components<'a>, pattern: &'a Path) { + if let Some(part) = components.next() { + self.children + .entry(part) + .or_default() + .insert(components, pattern); + } else { + self.patterns.push(pattern); + } + } + + #[allow(clippy::needless_pass_by_value)] + fn collect_patterns( + &self, + pattern_prefix: PathBuf, + group_prefix: PathBuf, + patterns: &mut Vec, + groups: &mut Vec<(PathBuf, Vec)>, + ) { + // collect all patterns beneath and including this node + for pattern in &self.patterns { + patterns.push(pattern_prefix.join(pattern)); + } + for (part, child) in &self.children { + if let Component::Normal(_) = part { + // for normal components, collect all descendant patterns ('normal' edges only) + child.collect_patterns( + pattern_prefix.join(part), + group_prefix.join(part), + patterns, + groups, + ); + } else { + // for non-normal component edges, kick off separate group collection at this node + child.collect_groups(group_prefix.join(part), groups); + } + } + } + + #[allow(clippy::needless_pass_by_value)] + fn collect_groups(&self, prefix: PathBuf, groups: &mut Vec<(PathBuf, Vec)>) { + // LCP-style grouping of patterns + if self.patterns.is_empty() { + // no patterns in this node; child nodes can form independent groups + for (part, child) in &self.children { + child.collect_groups(prefix.join(part), groups); + } + } else { + // pivot point, we've hit a pattern node; we have to stop here and form a group + let mut group = Vec::new(); + self.collect_patterns(PathBuf::new(), prefix.clone(), &mut group, groups); + groups.push((prefix, group)); + } + } +} + +/// Given a collection of globs, cluster them into (base, globs) groups so that: +/// - base doesn't contain any glob symbols +/// - each directory would only be walked at most once +/// - base of each group is the longest common prefix of globs in 
the group +pub(crate) fn cluster_globs(patterns: &[impl AsRef]) -> Vec<(PathBuf, Vec)> { + // split all globs into base/pattern + let globs: Vec<_> = patterns.iter().map(split_glob).collect(); + + // construct a path trie out of all split globs + let mut trie = Trie::default(); + for glob in &globs { + trie.insert(glob.base.components(), &glob.pattern); + } + + // run LCP-style aggregation of patterns in the trie into groups + let mut groups = Vec::new(); + trie.collect_groups(PathBuf::new(), &mut groups); + + // finally, convert resulting patterns to strings + groups + .into_iter() + .map(|(base, patterns)| { + ( + base, + patterns + .iter() + // NOTE: this unwrap is ok because input patterns are valid utf-8 + .map(|p| p.to_str().unwrap().to_owned()) + .collect(), + ) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::{GlobParts, cluster_globs, split_glob}; + + fn windowsify(path: &str) -> String { + if cfg!(windows) { + path.replace('/', "\\") + } else { + path.to_owned() + } + } + + #[test] + fn test_split_glob() { + #[track_caller] + fn check(input: &str, base: &str, pattern: &str) { + let result = split_glob(input); + let expected = GlobParts { + base: base.into(), + pattern: pattern.into(), + }; + assert_eq!(result, expected, "{input:?} != {base:?} + {pattern:?}"); + } + + check("", "", ""); + check("a", "", "a"); + check("a/b", "a", "b"); + check("a/b/", "a", "b"); + check("a/.//b/", "a", "b"); + check("./a/b/c", "a/b", "c"); + check("c/d/*", "c/d", "*"); + check("c/d/*/../*", "c/d", "*/../*"); + check("a/?b/c", "a", "?b/c"); + check("/a/b/*", "/a/b", "*"); + check("../x/*", "../x", "*"); + check("a/{b,c}/d", "a", "{b,c}/d"); + check("a/[bc]/d", "a", "[bc]/d"); + check("*", "", "*"); + check("*/*", "", "*/*"); + check("..", "..", ""); + check("/", "/", ""); + } + + #[test] + fn test_cluster_globs() { + #[track_caller] + fn check(input: &[&str], expected: &[(&str, &[&str])]) { + let input = input.iter().map(|s| windowsify(s)).collect::>(); + + let 
mut result_sorted = cluster_globs(&input); + for (_, patterns) in &mut result_sorted { + patterns.sort_unstable(); + } + result_sorted.sort_unstable(); + + let mut expected_sorted = Vec::new(); + for (base, patterns) in expected { + let mut patterns_sorted = Vec::new(); + for pattern in *patterns { + patterns_sorted.push(windowsify(pattern)); + } + patterns_sorted.sort_unstable(); + expected_sorted.push((windowsify(base).into(), patterns_sorted)); + } + expected_sorted.sort_unstable(); + + assert_eq!( + result_sorted, expected_sorted, + "{input:?} != {expected_sorted:?} (got: {result_sorted:?})" + ); + } + + check(&["a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]); + check(&["./a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]); + check(&["/a/b/*", "/a/c/*"], &[("/a/b", &["*"]), ("/a/c", &["*"])]); + check( + &["../a/b/*", "../a/c/*"], + &[("../a/b", &["*"]), ("../a/c", &["*"])], + ); + check(&["x/*", "y/*"], &[("x", &["*"]), ("y", &["*"])]); + check(&[], &[]); + check( + &["./*", "a/*", "../foo/*.png"], + &[("", &["*", "a/*"]), ("../foo", &["*.png"])], + ); + check( + &[ + "?", + "/foo/?", + "/foo/bar/*", + "../bar/*.png", + "../bar/../baz/*.jpg", + ], + &[ + ("", &["?"]), + ("/foo", &["?", "bar/*"]), + ("../bar", &["*.png"]), + ("../bar/../baz", &["*.jpg"]), + ], + ); + check(&["/abs/path/*"], &[("/abs/path", &["*"])]); + check(&["/abs/*", "rel/*"], &[("/abs", &["*"]), ("rel", &["*"])]); + check(&["a/{b,c}/*", "a/d?/*"], &[("a", &["{b,c}/*", "d?/*"])]); + check( + &[ + "../shared/a/[abc].png", + "../shared/a/b/*", + "../shared/b/c/?x/d", + "docs/important/*.{doc,xls}", + "docs/important/very/*", + ], + &[ + ("../shared/a", &["[abc].png", "b/*"]), + ("../shared/b/c", &["?x/d"]), + ("docs/important", &["*.{doc,xls}", "very/*"]), + ], + ); + check(&["file.txt"], &[("", &["file.txt"])]); + check(&["/"], &[("/", &[""])]); + check(&[".."], &[("..", &[""])]); + check( + &["file1.txt", "file2.txt"], + &[("", &["file1.txt", "file2.txt"])], + ); + check( + 
&["a/file1.txt", "a/file2.txt"], + &[("a", &["file1.txt", "file2.txt"])], + ); + check( + &["*", "a/b/*", "a/../c/*.jpg", "a/../c/*.png", "/a/*", "/b/*"], + &[ + ("", &["*", "a/b/*"]), + ("a/../c", &["*.jpg", "*.png"]), + ("/a", &["*"]), + ("/b", &["*"]), + ], + ); + + if cfg!(windows) { + check( + &[ + r"\\foo\bar\shared/a/[abc].png", + r"\\foo\bar\shared/a/b/*", + r"\\foo\bar/shared/b/c/?x/d", + r"D:\docs\important/*.{doc,xls}", + r"D:\docs/important/very/*", + ], + &[ + (r"\\foo\bar\shared\a", &["[abc].png", r"b\*"]), + (r"\\foo\bar\shared\b\c", &[r"?x\d"]), + (r"D:\docs\important", &["*.{doc,xls}", r"very\*"]), + ], + ); + } + } +} diff --git a/crates/uv-cache-info/src/lib.rs b/crates/uv-cache-info/src/lib.rs index 286411f68..092d40652 100644 --- a/crates/uv-cache-info/src/lib.rs +++ b/crates/uv-cache-info/src/lib.rs @@ -3,4 +3,5 @@ pub use crate::timestamp::*; mod cache_info; mod git_info; +mod glob; mod timestamp; From 088a436efe34ec517d5762545a753ab9424278d8 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 11 Jul 2025 11:45:45 -0500 Subject: [PATCH 010/130] Move `run_to_completion` utility to `crate::child` instead of `crate::commands::run` (#14566) This was really confusing as everything else in the `commands` module is a command --- crates/uv/src/{commands/run.rs => child.rs} | 0 crates/uv/src/commands/mod.rs | 1 - crates/uv/src/commands/project/run.rs | 2 +- crates/uv/src/commands/tool/run.rs | 2 +- crates/uv/src/lib.rs | 1 + 5 files changed, 3 insertions(+), 3 deletions(-) rename crates/uv/src/{commands/run.rs => child.rs} (100%) diff --git a/crates/uv/src/commands/run.rs b/crates/uv/src/child.rs similarity index 100% rename from crates/uv/src/commands/run.rs rename to crates/uv/src/child.rs diff --git a/crates/uv/src/commands/mod.rs b/crates/uv/src/commands/mod.rs index 0203d4dd5..d1e647363 100644 --- a/crates/uv/src/commands/mod.rs +++ b/crates/uv/src/commands/mod.rs @@ -72,7 +72,6 @@ mod project; mod publish; mod python; pub(crate) mod reporters; 
-mod run; #[cfg(feature = "self-update")] mod self_update; mod tool; diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 3eece5432..63850f563 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -40,6 +40,7 @@ use uv_static::EnvVars; use uv_warnings::warn_user; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceCache, WorkspaceError}; +use crate::child::run_to_completion; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, }; @@ -55,7 +56,6 @@ use crate::commands::project::{ validate_project_requires_python, }; use crate::commands::reporters::PythonDownloadReporter; -use crate::commands::run::run_to_completion; use crate::commands::{ExitStatus, diagnostics, project}; use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index c8297243d..f6b79774c 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -42,6 +42,7 @@ use uv_warnings::warn_user; use uv_warnings::warn_user_once; use uv_workspace::WorkspaceCache; +use crate::child::run_to_completion; use crate::commands::ExitStatus; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, @@ -51,7 +52,6 @@ use crate::commands::project::{ EnvironmentSpecification, PlatformState, ProjectError, resolve_names, }; use crate::commands::reporters::PythonDownloadReporter; -use crate::commands::run::run_to_completion; use crate::commands::tool::common::{matching_packages, refine_interpreter}; use crate::commands::tool::{Target, ToolRequest}; use crate::commands::{diagnostics, project::environment::CachedEnvironment}; diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 261dd8d7c..84d889599 100644 --- 
a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -52,6 +52,7 @@ use crate::settings::{ PublishSettings, }; +pub(crate) mod child; pub(crate) mod commands; pub(crate) mod logging; pub(crate) mod printer; From a9e21f7f6b26e4ad27718a35efb53d7cda490e69 Mon Sep 17 00:00:00 2001 From: dmitry-bychkov Date: Fri, 11 Jul 2025 20:05:15 +0300 Subject: [PATCH 011/130] Update CONTRIBUTING.md with instructions to format markdown files (#14246) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Current documentation requires contributors to have Node.js/npm installed locally to format Markdown files. This might be problematic for users who don't work with the JavaScript ecosystem or users who want to avoid toolchain setup. This change adds a Docker-based alternative: ``` docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md" ``` This mounts the current working directory into /src/ inside of a container and also sets the working directory (-w) to /src/ so prettier loads .editorconfig. ## Test Plan Both commands should produce the same output
    Native Prettier ```console ➜ uv git:(docs/contributing-md-formatting) npx prettier --prose-wrap always --write "**/*.md" .github/PULL_REQUEST_TEMPLATE.md 28ms (unchanged) BENCHMARKS.md 30ms (unchanged) changelogs/0.1.x.md 264ms (unchanged) changelogs/0.2.x.md 223ms (unchanged) changelogs/0.3.x.md 29ms (unchanged) changelogs/0.4.x.md 126ms (unchanged) changelogs/0.5.x.md 153ms (unchanged) changelogs/0.6.x.md 77ms (unchanged) CONTRIBUTING.md 9ms (unchanged) crates/README.md 4ms (unchanged) crates/uv-build/README.md 1ms (unchanged) crates/uv-client/README.md 1ms (unchanged) crates/uv-globfilter/README.md 3ms (unchanged) crates/uv-pep440/Readme.md 6ms (unchanged) crates/uv-pep508/Readme.md 3ms (unchanged) crates/uv-python/python/packaging/README.md 1ms (unchanged) crates/uv-trampoline/README.md 14ms (unchanged) crates/uv-virtualenv/README.md 1ms (unchanged) docs/concepts/authentication.md 10ms (unchanged) docs/concepts/build-backend.md 11ms (unchanged) docs/concepts/cache.md 17ms (unchanged) docs/concepts/configuration-files.md 9ms (unchanged) docs/concepts/index.md 2ms (unchanged) docs/concepts/indexes.md 22ms (unchanged) docs/concepts/projects/build.md 4ms (unchanged) docs/concepts/projects/config.md 25ms (unchanged) docs/concepts/projects/dependencies.md 29ms (unchanged) docs/concepts/projects/index.md 2ms (unchanged) docs/concepts/projects/init.md 10ms (unchanged) docs/concepts/projects/layout.md 10ms (unchanged) docs/concepts/projects/run.md 4ms (unchanged) docs/concepts/projects/sync.md 11ms (unchanged) docs/concepts/projects/workspaces.md 12ms (unchanged) docs/concepts/python-versions.md 26ms (unchanged) docs/concepts/resolution.md 40ms (unchanged) docs/concepts/tools.md 19ms (unchanged) docs/getting-started/features.md 8ms (unchanged) docs/getting-started/first-steps.md 2ms (unchanged) docs/getting-started/help.md 8ms (unchanged) docs/getting-started/index.md 2ms (unchanged) docs/getting-started/installation.md 8ms (unchanged) docs/guides/index.md 2ms 
(unchanged) docs/guides/install-python.md 31ms (unchanged) docs/guides/integration/alternative-indexes.md 21ms (unchanged) docs/guides/integration/aws-lambda.md 49ms (unchanged) docs/guides/integration/dependency-bots.md 16ms (unchanged) docs/guides/integration/docker.md 37ms (unchanged) docs/guides/integration/fastapi.md 8ms (unchanged) docs/guides/integration/github.md 36ms (unchanged) docs/guides/integration/index.md 4ms (unchanged) docs/guides/integration/jupyter.md 17ms (unchanged) docs/guides/integration/marimo.md 11ms (unchanged) docs/guides/integration/pre-commit.md 27ms (unchanged) docs/guides/integration/pytorch.md 12ms (unchanged) docs/guides/package.md 5ms (unchanged) docs/guides/projects.md 12ms (unchanged) docs/guides/scripts.md 19ms (unchanged) docs/guides/tools.md 8ms (unchanged) docs/index.md 7ms (unchanged) docs/pip/compatibility.md 44ms (unchanged) docs/pip/compile.md 13ms (unchanged) docs/pip/dependencies.md 3ms (unchanged) docs/pip/environments.md 10ms (unchanged) docs/pip/index.md 2ms (unchanged) docs/pip/inspection.md 1ms (unchanged) docs/pip/packages.md 3ms (unchanged) docs/reference/benchmarks.md 3ms (unchanged) docs/reference/index.md 3ms (unchanged) docs/reference/installer.md 2ms (unchanged) docs/reference/policies/index.md 2ms (unchanged) docs/reference/policies/license.md 2ms (unchanged) docs/reference/policies/platforms.md 4ms (unchanged) docs/reference/policies/versioning.md 2ms (unchanged) docs/reference/resolver-internals.md 19ms (unchanged) docs/reference/troubleshooting/build-failures.md 13ms (unchanged) docs/reference/troubleshooting/index.md 1ms (unchanged) docs/reference/troubleshooting/reproducible-examples.md 7ms (unchanged) PIP_COMPATIBILITY.md 1ms (unchanged) README.md 10ms (unchanged) scripts/benchmark/README.md 1ms (unchanged) scripts/packages/built-by-uv/README.md 1ms (unchanged) scripts/packages/dependent_locals/first_local/README.md 0ms (unchanged) scripts/packages/dependent_locals/second_local/README.md 0ms 
(unchanged) scripts/packages/hatchling_editable/README.md 0ms (unchanged) scripts/packages/README.md 1ms (unchanged) scripts/packages/root_editable/README.md 0ms (unchanged) scripts/workspaces/albatross-virtual-workspace/packages/Unrelated.md 1ms (unchanged) SECURITY.md 2ms (unchanged) STYLE.md 9ms (unchanged) ➜ uv git:(docs/contributing-md-formatting) git status On branch docs/contributing-md-formatting nothing to commit, working tree clean ➜ uv git:(docs/contributing-md-formatting) ```
    Docker based ```console ➜ uv git:(docs/contributing-md-formatting) sudo docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md" npm warn exec The following package was not found and will be installed: prettier@3.6.0 .github/PULL_REQUEST_TEMPLATE.md 54ms (unchanged) BENCHMARKS.md 41ms (unchanged) changelogs/0.1.x.md 297ms (unchanged) changelogs/0.2.x.md 306ms (unchanged) changelogs/0.3.x.md 50ms (unchanged) changelogs/0.4.x.md 137ms (unchanged) changelogs/0.5.x.md 217ms (unchanged) changelogs/0.6.x.md 114ms (unchanged) CONTRIBUTING.md 12ms (unchanged) crates/README.md 8ms (unchanged) crates/uv-build/README.md 2ms (unchanged) crates/uv-client/README.md 2ms (unchanged) crates/uv-globfilter/README.md 6ms (unchanged) crates/uv-pep440/Readme.md 8ms (unchanged) crates/uv-pep508/Readme.md 5ms (unchanged) crates/uv-python/python/packaging/README.md 2ms (unchanged) crates/uv-trampoline/README.md 17ms (unchanged) crates/uv-virtualenv/README.md 2ms (unchanged) docs/concepts/authentication.md 20ms (unchanged) docs/concepts/build-backend.md 20ms (unchanged) docs/concepts/cache.md 35ms (unchanged) docs/concepts/configuration-files.md 11ms (unchanged) docs/concepts/index.md 3ms (unchanged) docs/concepts/indexes.md 24ms (unchanged) docs/concepts/projects/build.md 5ms (unchanged) docs/concepts/projects/config.md 25ms (unchanged) docs/concepts/projects/dependencies.md 38ms (unchanged) docs/concepts/projects/index.md 3ms (unchanged) docs/concepts/projects/init.md 15ms (unchanged) docs/concepts/projects/layout.md 11ms (unchanged) docs/concepts/projects/run.md 7ms (unchanged) docs/concepts/projects/sync.md 15ms (unchanged) docs/concepts/projects/workspaces.md 15ms (unchanged) docs/concepts/python-versions.md 30ms (unchanged) docs/concepts/resolution.md 52ms (unchanged) docs/concepts/tools.md 20ms (unchanged) docs/getting-started/features.md 10ms (unchanged) docs/getting-started/first-steps.md 2ms (unchanged) docs/getting-started/help.md 5ms 
(unchanged) docs/getting-started/index.md 3ms (unchanged) docs/getting-started/installation.md 8ms (unchanged) docs/guides/index.md 2ms (unchanged) docs/guides/install-python.md 49ms (unchanged) docs/guides/integration/alternative-indexes.md 29ms (unchanged) docs/guides/integration/aws-lambda.md 102ms (unchanged) docs/guides/integration/dependency-bots.md 20ms (unchanged) docs/guides/integration/docker.md 38ms (unchanged) docs/guides/integration/fastapi.md 7ms (unchanged) docs/guides/integration/github.md 46ms (unchanged) docs/guides/integration/index.md 3ms (unchanged) docs/guides/integration/jupyter.md 16ms (unchanged) docs/guides/integration/marimo.md 6ms (unchanged) docs/guides/integration/pre-commit.md 14ms (unchanged) docs/guides/integration/pytorch.md 18ms (unchanged) docs/guides/package.md 9ms (unchanged) docs/guides/projects.md 11ms (unchanged) docs/guides/scripts.md 13ms (unchanged) docs/guides/tools.md 13ms (unchanged) docs/index.md 11ms (unchanged) docs/pip/compatibility.md 40ms (unchanged) docs/pip/compile.md 12ms (unchanged) docs/pip/dependencies.md 4ms (unchanged) docs/pip/environments.md 10ms (unchanged) docs/pip/index.md 4ms (unchanged) docs/pip/inspection.md 2ms (unchanged) docs/pip/packages.md 5ms (unchanged) docs/reference/benchmarks.md 2ms (unchanged) docs/reference/index.md 3ms (unchanged) docs/reference/installer.md 3ms (unchanged) docs/reference/policies/index.md 1ms (unchanged) docs/reference/policies/license.md 3ms (unchanged) docs/reference/policies/platforms.md 5ms (unchanged) docs/reference/policies/versioning.md 4ms (unchanged) docs/reference/resolver-internals.md 29ms (unchanged) docs/reference/troubleshooting/build-failures.md 19ms (unchanged) docs/reference/troubleshooting/index.md 2ms (unchanged) docs/reference/troubleshooting/reproducible-examples.md 9ms (unchanged) PIP_COMPATIBILITY.md 1ms (unchanged) README.md 15ms (unchanged) scripts/benchmark/README.md 1ms (unchanged) scripts/packages/built-by-uv/README.md 1ms (unchanged) 
scripts/packages/dependent_locals/first_local/README.md 0ms (unchanged) scripts/packages/dependent_locals/second_local/README.md 0ms (unchanged) scripts/packages/hatchling_editable/README.md 1ms (unchanged) scripts/packages/README.md 1ms (unchanged) scripts/packages/root_editable/README.md 0ms (unchanged) scripts/workspaces/albatross-virtual-workspace/packages/Unrelated.md 2ms (unchanged) SECURITY.md 3ms (unchanged) STYLE.md 16ms (unchanged) npm notice npm notice New minor version of npm available! 11.3.0 -> 11.4.2 npm notice Changelog: https://github.com/npm/cli/releases/tag/v11.4.2 npm notice To update run: npm install -g npm@11.4.2 npm notice ➜ uv git:(docs/contributing-md-formatting) git status On branch docs/contributing-md-formatting nothing to commit, working tree clean ➜ uv git:(docs/contributing-md-formatting) ```
    Co-authored-by: Dmitry Bychkov --- CONTRIBUTING.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 14b5197fe..f7be958a4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -165,6 +165,13 @@ After making changes to the documentation, format the markdown files with: npx prettier --prose-wrap always --write "**/*.md" ``` +Note that the command above requires Node.js and npm to be installed on your system. As an +alternative, you can run this command using Docker: + +```console +$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md" +``` + ## Releases Releases can only be performed by Astral team members. From 081e2010df63c561bdf56f5d6e34b102dd035d94 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 11 Jul 2025 12:13:35 -0500 Subject: [PATCH 012/130] Isolate `install_git_public_rate_limited...` test from `UV_HTTP_RETRIES` (#14567) Blocking https://github.com/astral-sh/uv/pull/14565 This also makes the test 5x faster, from 5s to 1s. 
--- crates/uv/tests/it/edit.rs | 4 +++- crates/uv/tests/it/pip_install.rs | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index c1a74541f..18170cff9 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -561,7 +561,9 @@ async fn add_git_private_rate_limited_by_github_rest_api_429_response() -> Resul uv_snapshot!(context.filters(), context .add() .arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")) - .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + .env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri()) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true") + .env_remove(EnvVars::UV_HTTP_RETRIES), @r" success: true exit_code: 0 ----- stdout ----- diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index f142beefa..bc27228c7 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -2168,7 +2168,9 @@ async fn install_git_public_rate_limited_by_github_rest_api_429_response() { uv_snapshot!(context.filters(), context .pip_install() .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage") - .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + .env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri()) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true") + .env_remove(EnvVars::UV_HTTP_RETRIES), @r" success: true exit_code: 0 ----- stdout ----- From ee35fe34ab90adb99505581daeb63edd5c3f827f Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 11 Jul 2025 13:59:47 -0500 Subject: [PATCH 013/130] Increase the number of retries during test runs in CI (#14565) --- .github/workflows/ci.yml | 8 ++++++++ crates/uv/tests/it/edit.rs | 4 +++- crates/uv/tests/it/network.rs | 33 +++++++++++++++++++++++++-------- 3 files changed, 36 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b0d8e18a3..ba7a4b4d1 
100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -223,6 +223,9 @@ jobs: tool: cargo-nextest - name: "Cargo test" + env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 run: | cargo nextest run \ --features python-patch \ @@ -256,6 +259,9 @@ jobs: tool: cargo-nextest - name: "Cargo test" + env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 run: | cargo nextest run \ --no-default-features \ @@ -300,6 +306,8 @@ jobs: - name: "Cargo test" working-directory: ${{ env.UV_WORKSPACE }} env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 # Avoid permission errors during concurrent tests # See https://github.com/astral-sh/uv/issues/6940 UV_LINK_MODE: copy diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index 18170cff9..ddaed434f 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -11877,7 +11877,9 @@ async fn add_unexpected_error_code() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(server.uri()), @r" + uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(server.uri()) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 2 ----- stdout ----- diff --git a/crates/uv/tests/it/network.rs b/crates/uv/tests/it/network.rs index 1a5805970..a9376e07e 100644 --- a/crates/uv/tests/it/network.rs +++ b/crates/uv/tests/it/network.rs @@ -3,6 +3,7 @@ use std::{env, io}; use assert_fs::fixture::{ChildPath, FileWriteStr, PathChild}; use http::StatusCode; use serde_json::json; +use uv_static::EnvVars; use wiremock::matchers::method; use wiremock::{Mock, MockServer, ResponseTemplate}; @@ -48,7 +49,9 @@ async fn simple_http_500() { .pip_install() .arg("tqdm") .arg("--index-url") - .arg(&mock_server_uri), @r" + .arg(&mock_server_uri) + .env_remove(EnvVars::UV_HTTP_RETRIES) + 
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 2 ----- stdout ----- @@ -72,7 +75,9 @@ async fn simple_io_err() { .pip_install() .arg("tqdm") .arg("--index-url") - .arg(&mock_server_uri), @r" + .arg(&mock_server_uri) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 2 ----- stdout ----- @@ -99,7 +104,9 @@ async fn find_links_http_500() { .arg("tqdm") .arg("--no-index") .arg("--find-links") - .arg(&mock_server_uri), @r" + .arg(&mock_server_uri) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 2 ----- stdout ----- @@ -125,7 +132,9 @@ async fn find_links_io_error() { .arg("tqdm") .arg("--no-index") .arg("--find-links") - .arg(&mock_server_uri), @r" + .arg(&mock_server_uri) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 2 ----- stdout ----- @@ -154,7 +163,9 @@ async fn direct_url_http_500() { let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; uv_snapshot!(filters, context .pip_install() - .arg(format!("tqdm @ {tqdm_url}")), @r" + .arg(format!("tqdm @ {tqdm_url}")) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 1 ----- stdout ----- @@ -180,7 +191,9 @@ async fn direct_url_io_error() { let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; uv_snapshot!(filters, context .pip_install() - .arg(format!("tqdm @ {tqdm_url}")), @r" + .arg(format!("tqdm @ {tqdm_url}")) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 1 ----- stdout ----- @@ -239,7 +252,9 @@ async fn python_install_http_500() { .python_install() .arg("cpython-3.10.0-darwin-aarch64-none") .arg("--python-downloads-json-url") - .arg(python_downloads_json.path()), @r" + 
.arg(python_downloads_json.path()) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 1 ----- stdout ----- @@ -269,7 +284,9 @@ async fn python_install_io_error() { .python_install() .arg("cpython-3.10.0-darwin-aarch64-none") .arg("--python-downloads-json-url") - .arg(python_downloads_json.path()), @r" + .arg(python_downloads_json.path()) + .env_remove(EnvVars::UV_HTTP_RETRIES) + .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r" success: false exit_code: 1 ----- stdout ----- From 7ea030a1a854680001205037ae0959c1750365a2 Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Sat, 12 Jul 2025 12:46:40 -0400 Subject: [PATCH 014/130] Bump Python releases to pick up python-build-standalone 20250712 (#14578) This is primarily a regression fix for missing SQLite extensions (astral-sh/python-build-standalone#694). --- crates/uv-python/download-metadata.json | 936 ++++++++++++------------ 1 file changed, 468 insertions(+), 468 deletions(-) diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json index 4e2d98846..8c7ffec4c 100644 --- a/crates/uv-python/download-metadata.json +++ b/crates/uv-python/download-metadata.json @@ -11,8 +11,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "7a69c986243f4e7ed70c1a97d4a524253d3fb4f042ae68eb688f9fafe5dbb714", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "94b80254a7e50dd2d82d323a0bffdc59772b2f04b0f0c044bc4d56d696249eb2", "variant": null }, "cpython-3.14.0b4-darwin-x86_64-none": { @@ -27,8 +27,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "8c100fe3bfef08b046051c4183c9ca4542317729c466982783fabea996fcb97f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "2155f60b2a8a1448b2c4852a27887be2e9fe8e910bac1a75b342e44884a191b5", "variant": null }, "cpython-3.14.0b4-linux-aarch64-gnu": { @@ -43,8 +43,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "930e8ecf6c89de145cf49171d98e089af7007752e8e7652c1ea73460fec0d07c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f76fb1a88e722f9cae8b82b9851b736968582527d8a1212ab3b918b2012ce0a6", "variant": null }, "cpython-3.14.0b4-linux-armv7-gnueabi": { @@ -59,8 +59,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "5b489148c56a0a9772568706cf6c716e14b1d93e52f54d76f71f14783f659d13", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "c358e87ac84d228e191a22d2447c60e1cb15e6cbb753c397b0e9b9da9c557ce0", "variant": null }, "cpython-3.14.0b4-linux-armv7-gnueabihf": { @@ -75,8 +75,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "2b4474ebc495b64374339acf58d22793f8f55ce1a40e31d61a988af7cf2c8085", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "a426e05b3d8a20dfbda84162ef75ed3590e7137436623b93d136c084d0688690", "variant": null }, "cpython-3.14.0b4-linux-powerpc64le-gnu": { @@ -91,8 +91,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "abc24237c270f248b5b2990091209a60c23d5bef8476796cf5b0c16c34a24e54", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b835aac7264b64652007f5210369d5fe1b8d1629befbb8d00e40a891cd039f67", "variant": null }, "cpython-3.14.0b4-linux-riscv64-gnu": { @@ -107,8 +107,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "fd25c2de82d3ea004831c543591195f3790c93d5df7f5f1a39b0e5f9e1716039", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0ad96a96ae32f5979f2bd9e6992ecf122819ceb06711439c66b9f8a3dc1eaba4", "variant": null }, "cpython-3.14.0b4-linux-s390x-gnu": { @@ -123,8 +123,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "35f93fd3336dcfd2612fb2945937221f81af9a65369efb81afa1d89784029e61", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "18763ccce35baeb1960e043f9bd4be3a36a511acc6844b91381532ee5b7c6da8", "variant": null }, "cpython-3.14.0b4-linux-x86_64-gnu": { @@ -139,8 +139,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "a76999ca5b8c6e219750b016870fc85cc395dd992de1d702576d1c831585aa95", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "3d07868b329c7c9b7ae5a52af35c27d0b20b5a7f6f574a3bedb5836b4bb337d7", "variant": null }, "cpython-3.14.0b4-linux-x86_64-musl": { @@ -155,8 +155,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "a8f12323bd6c10f1ecadbe424e64c2429434e59e69314966a422c9a7eb5f13a0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "82ee7827c1f75a7b5150f731ddf1dc312c7958c741a6746967fb8a5656c85b91", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v2-gnu": { @@ -171,8 +171,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "64649a18cee348ba72b42ec46aa548dca3d79ed37a2abeea17f5b5fea4ad67b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c96dd14927c89392bd0ff3264e4b7bdfeea76979f544ee30260151c913046396", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v2-musl": { @@ -187,8 +187,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "352b97d9c5634787cdfe11b00a4ac83e0a254f70dc2887780fa93b52a8cdbec8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ae82acb77c69c506a799bd7022fe9a22508814fe76d0d7e53c1f2f60b5fc77d6", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v3-gnu": { @@ -203,8 +203,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "d780f46da4c2ae2400cb08c6e5900d976d46572c1fb2dc6a9494a4c309f913f2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9fdb71600bbdcae5dd47426972d1d0af03a2f7d98ac44fbb63284203738fda2c", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v3-musl": { @@ -219,8 +219,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "4ef7c85e6a6788f1838a80a23463ee36fdfd50c909c784bc6ed7011725220288", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "f864428b9b6b5938efeb93526d52ec685377672ad292e4b2eee62cb6107933e1", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v4-gnu": { @@ -235,8 +235,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "cd91301114d7ebfcfccbb3377a09c8d8537dc460de629ec6e64d3880aeb7ab0c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0d3f7f0c8b881bcdff08d14a0999c736f13e309e663edd0739a2db327c43e4c2", "variant": null }, "cpython-3.14.0b4-linux-x86_64_v4-musl": { @@ -251,8 +251,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "ff8cba3869c879717c6aae2931398b1c30ab761008483a49cc5d93899a2eeb8c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "11443f91bbda5f3d440908f20bfafd549dad5357e705f1e85273ebb6db0206f3", "variant": null }, "cpython-3.14.0b4-windows-aarch64-none": { @@ -267,8 +267,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "c21eb7a109ec8b980735aee5ca5c3b7522479919d12078f046a05114de428ff0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "61bef0ff22c3117795c55d5e8e2c87956a94fbb4725e03231f360b7c68ba5358", "variant": null }, "cpython-3.14.0b4-windows-i686-none": { @@ -283,8 +283,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "29ebdc7899a947e29aba6376477d059871698b712cf0dfb75b8e96af2e8b23cb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "bcf229f25c12f81169b1f1d207a719fc2908f4e6ba5b61404787710d3b1e2120", "variant": null }, "cpython-3.14.0b4-windows-x86_64-none": { @@ -299,8 +299,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "072b97a1850f11bc350c1abfa5c08024ce4fe008022d634e23d4647e47cc005f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "8255b31a40867eb52ff1a2e476f56c697a717e6193d313413c788b0fbdd28a3c", "variant": null }, "cpython-3.14.0b4+freethreaded-darwin-aarch64-none": { @@ -315,8 +315,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "f4a28e1d77003d6cd955f2a436a244ec03bb64f142a9afc79246634d3dec5da3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "ce28498dcf2c5c4d3c964e6e44ff44e5b1b72a4234f807e2ff121393ed40442e", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-darwin-x86_64-none": { @@ -331,8 +331,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "f1ea70b041fa5862124980b7fe34362987243a7ecc34fde881357503e47f32ab", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "a7d63512a17522d7c76c7bafa27b49a35f4f5f74b5140be209ca17c0cad15737", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-aarch64-gnu": { @@ -347,8 +347,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "2a92a108a3fbd5c439408fe9f3b62bf569ef06dbc2b5b657de301f14a537231a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "0250288ab21cfd14caa826056de7203baa19ed7e85198c19e6dcdd8b2124ae0e", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-armv7-gnueabi": { @@ -363,8 +363,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", - "sha256": "f1d52c12f6908f6dc0658bf9d5cf1068272b4f9026aa33b59ded9f17e1d51f9f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "c0bd17a6409c21fb10b075449511c09940b53438bf785cd20db1f2e5d15ade30", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-armv7-gnueabihf": { @@ -379,8 +379,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", - "sha256": "418741c7de3c53323d9ae8a42a450f0f612fa5fbea1bedeea57dee0647c82a8d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "d747055b6b5878dcf6b9d425b0a7ea3fa7b33fe241b31681e28f56d5ed86ed5d", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-powerpc64le-gnu": { @@ -395,8 +395,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "5823a07c957162d6d675488d5306ac3f35a3f458e946cd74da6d1ac69bc97ce3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "756376b22bf237646f7bb519bee69b1704d369a6ca5941b5ff83d5b2d022612b", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-riscv64-gnu": { @@ -411,8 +411,8 @@ "minor": 14, "patch": 0, 
"prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "f48843e0f1c13ddeaaf9180bc105475873d924638969bc9256a2ac170faeb933", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "25dbe52c44b42914d9343d456dc17fbcbf234ab1f0fd0be00cae27c6e336546b", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-s390x-gnu": { @@ -427,8 +427,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "a1e6f843d533c88e290d1e757d4c7953c4f4ccfb5380fef5405aceab938c6f57", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "7ebb845ee94ae870e13146de0052251d48d584363c1b374f84fbdeb8e7936350", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64-gnu": { @@ -443,8 +443,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "7f5ab66a563f48f169bdb1d216eed8c4126698583d21fa191ab4d995ca8b5506", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "0df5305c3b95f53f7f2db762be2badf752477c359146155f8b9658b71aff2128", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64-musl": { @@ -459,8 +459,8 @@ "minor": 14, "patch": 0, 
"prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "180249191d6e84b5dd61f6f7ba7215582b1296ef4d8bd048439cd981363cd2b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "c6beef48f6a2ca49da0b2798e5dc9c45233a8f0b6fa778616ba7cfdcd66f85a6", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64_v2-gnu": { @@ -475,8 +475,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "bc9c0f25680f1f3c3104aef3144f1cd8c72d31e4cbf45a7c6f89ddb5c1b0e952", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "31587432be64d6913317919c239ef84ae4c78a7b11f95e8d48b81dc820021be3", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64_v2-musl": { @@ -491,8 +491,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "b30a2004c89d79256926bb4d87bec6100b669d967d336cb9df1aa5ae9a9106cf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "302a23af192207337db2c2268a3fed98f13845ad5324f1ff97baa68807098513", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64_v3-gnu": { @@ -507,8 +507,8 @@ 
"minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "6941b1d02adb12cd875c2320e0d30380b7837c705333336b8d295440d93d3668", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "39747d608a5400b0fa37fbddef606678f8552fdf907f43b1d8a475436c413aa9", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64_v3-musl": { @@ -523,8 +523,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "b64f69cb58ac51e962080d6fa848d90dc24739bc94089a7975b3459b23ad5df3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "9870447eb095027df97a1e412eff378fb78872a527dc6adeffc901fff8a40d70", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-linux-x86_64_v4-gnu": { @@ -539,8 +539,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "b294b586bdcbc0b038e77999d4371c6fe3d90228b2b9aa632262ad3f5210487b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "78adac3ab0696380ebdbceb96924d0f033e20b033e3a1633aa54df0295407292", "variant": "freethreaded" }, 
"cpython-3.14.0b4+freethreaded-linux-x86_64_v4-musl": { @@ -555,8 +555,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "61ed61ed5052a7ca9d919194526486d7f973fd69bb97e70e95c917a984f723c7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "59f92039b72eca4cfb4639699bc97bbb0de6b866a7894bac9cf132374cf5aa1a", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-windows-aarch64-none": { @@ -571,8 +571,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "d7396bafafc82b7e817f0d16208d0f37a88a97c0a71d91e477cbadc5b9d55f6d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "37fac713d3b25731f134c9c6b1c9021ffb2aacda630010ffa15497446655179f", "variant": "freethreaded" }, "cpython-3.14.0b4+freethreaded-windows-i686-none": { @@ -587,8 +587,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "7066fc54db97331fb25f52783f188d65f8868ad578f9e25cb9b1ae1f2c6dacc5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "5a7d61b1863960dab6f78027b5edc543ee41d0a45f7851413951389b842385c8", "variant": "freethreaded" }, 
"cpython-3.14.0b4+freethreaded-windows-x86_64-none": { @@ -603,8 +603,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "5de7968ba0e344562fcff0f9f7c9454966279f1e274b6e701edee253b4a6b565", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "e503ec18fd8b1d0fcb94ded5a67be4a88334d5b101dc485b0281577ae84a6acc", "variant": "freethreaded" }, "cpython-3.14.0b4+debug-linux-aarch64-gnu": { @@ -619,8 +619,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "9ac97f7531f9d74ccd1f7de8b558029094831a0be965fe9569ecc7547aeec445", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6bf05e71ef3cf092d0f40d992ea192016327468992e5e0b7bde8ac48d6b9c145", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-armv7-gnueabi": { @@ -635,8 +635,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "fcb0d09a7774b69ca7df3a954fedc32bd1935838c91918f1d08b9a19914f30ec", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "9b73df95176c383e4af6027b78da060c69892914bfc195107084b21281f09bfd", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-armv7-gnueabihf": { @@ -651,8 +651,8 @@ "minor": 14, 
"patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "664a70a1f73eb0ca1299bf8b26ec0b696ea1a09a26b5a1956688c3e4004b0ce2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "2d325c459c761b4bca5e2005aeccc889ef62ee4b0811d9252e22817f3037825e", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-powerpc64le-gnu": { @@ -667,8 +667,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "71ac17708fd382292c5dbc77b11646b9ee52230381c2f7067bc5f22a2e2fd9cf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1c49311aae1ade3afd9d39091897d2b1307aeadfdde87e5099e07b0fdc32bc2f", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-riscv64-gnu": { @@ -683,8 +683,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "2916572ff670885b38860861fceb395711831ac2a36e0830fe0ee029a91cec56", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ad52ff04ef3fc78430b8b0623a0442088dc4e8c6835fce6957e251676942ebbf", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-s390x-gnu": { @@ -699,8 +699,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "4086605066914c6fb1944932e59585c328c3a688379d2c061df8e963e65e04dd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6865d4830ef7beaa99dd817df0c49bb0d380b9a0c822be6f8ca090f9a568df81", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64-gnu": { @@ -715,8 +715,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c91fa37d96f46a4f58ac6d3b2d9e0178288e2fb21a05131c874abfbfae404f71", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "db9c32e119c58d9f25745599efaa383be06323ca8d8524a6c50b62367b058b93", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64-musl": { @@ -731,8 +731,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "ab08748b50a7df1e6231fab1bf59a7e0b26cfb44ff2c811a9f249fe141332d21", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "39dece02d5b286e7d9ffbbacdd730db0d64b881bb2b2edd3b721be23c4e89609", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v2-gnu": { @@ -747,8 +747,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "64dd678f10b3bb86bd047cf585651d323c80e34da840ca8ed49507f3959acc90", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "90453b5f3d982604a950e5f362b192889f82524257d2fa8bf979b270e8bdb370", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v2-musl": { @@ -763,8 +763,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "3e057342e72555a4934e05037423f2b68f42d62a6f10b36d48150ca5110d603e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "d070ef11038828a1326c230c45782c70f02a6b89504af76cc95f0778db20caac", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v3-gnu": { @@ -779,8 +779,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "265b07a17fedc8ca32a8ebd6763946c21bb472346ac65efb89d1e045e4772abd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "baf92ab8fa281f72a8e8b4a1975a931876866b69aebed1eb94dafeaa219f788d", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v3-musl": { @@ -795,8 +795,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "5860fc768bf7c7d2051ee80109f0fd5a4d89f045ca26562f88e5f93978979abe", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "3a92a638ef08b058eebf806ecb0134aa9467c554512fd2082e6ecd1a6c517fdd", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v4-gnu": { @@ -811,8 +811,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ae0cf5352a594ce1dfd287fb49684490128a7f89b3dfbcd43f1b8d84083c8ead", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7144cb9ac62b0084b8421b83e90aab0ed6e704cc5f63ba1c16f8216971d11857", "variant": "debug" }, "cpython-3.14.0b4+debug-linux-x86_64_v4-musl": { @@ -827,8 +827,8 @@ "minor": 14, "patch": 0, "prerelease": "b4", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.14.0b4%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "5e2b1a537aa9cc6e1c77e6050f31aacd866c50b16b603b54c485b8f8cfeebb4a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "bef1d2f0e3f32667366655e8333ef1f92ab07cd7b988da110f3970a5d671e3a3", "variant": "debug" }, "cpython-3.14.0b3-darwin-aarch64-none": { @@ -6395,8 +6395,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "71c9af8648001c4a09943305a890339a4cfff0bd260aa5a9d8c8e82e7ef32583", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "08d840adc7dd1724bd7c25141a0207f8343808749fa67e608d8007b46429c196", "variant": null }, "cpython-3.13.5-darwin-x86_64-none": { @@ -6411,8 +6411,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "65b171888e34d0a904ee0a6adef1a5366bdedcd9fca990ec06717a68eef2c4ff", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "5277dc381e94abde80989841f3015df2aba33894893c4a31d63400887bdefd2d", "variant": null }, "cpython-3.13.5-linux-aarch64-gnu": { @@ -6427,8 +6427,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "e0d2322a92b9bb8e39442cbcfa6ee9590fd035de2a6199d4e6903dcbc0b6542a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "82d8a025b43c9127d47490a7070aa5d8bfede2d1deb5161c0f4c2355396f9e5d", "variant": null }, "cpython-3.13.5-linux-armv7-gnueabi": { @@ -6443,8 +6443,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "59442502a4eebff23a49503a9cbe92a6b813a756bf36a299ced55fb705d5fe73", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "6aa50bf3245364091a7e5ca6b88166f960c2268586c33e295069645815f16195", "variant": null }, "cpython-3.13.5-linux-armv7-gnueabihf": { @@ -6459,8 +6459,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "c3de5a89b71ef3dc8ee53777a9fda3f2d7f381abc0b4a6f6f890de55d3620293", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "5f776b18951b9a0507e64e890796113a16b18adb93a01d4f84c922e2564dab43", "variant": null }, "cpython-3.13.5-linux-powerpc64le-gnu": { @@ -6475,8 +6475,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c17e73fe07de36a506ffc400173739d2802f30bdc5f5b6443891bbcee926edac", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b74b79e5a65c84ed732071fd7b445a51b86c03ef18643b87c0fe5c96242e629b", "variant": null }, "cpython-3.13.5-linux-riscv64-gnu": { @@ -6491,8 +6491,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1b5da1585dca39a15452c891ff16f468ce984f76500c262f08c4aeae75e79c3c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "652416183693219b1f0f1f2a8d2a595f75f8c94e8c7b8b25ecd312ec1fdbb36e", "variant": null }, "cpython-3.13.5-linux-s390x-gnu": { @@ -6507,8 +6507,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "d47e645034432fce6d107835c07d5fe38fd53232a66e0a9d63ead48b42da3539", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "29a7140db0cbd1426f450cd419a8b5892a4a72d7ef74c1760940dd656f8eaded", "variant": null }, "cpython-3.13.5-linux-x86_64-gnu": { @@ -6523,8 +6523,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2f57c58edc385fe9958d2c6e41ecd389cfed3f882515a1813f1d2ba4c964f399", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e42827755c227d3ea31b0c887230db1cd411e8bddf84f16341a989de2d352c51", "variant": null }, "cpython-3.13.5-linux-x86_64-musl": { @@ -6539,8 +6539,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "13cf16ef2008adf36a812add953317a4359945468dbcaece38b2b71466d05502", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a652ff101318b7bd7a06181df679e2e76d592ebe70dbc4ca5db97b572889d93f", "variant": null }, "cpython-3.13.5-linux-x86_64_v2-gnu": { @@ -6555,8 +6555,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "416d3a7bd64c3ee047b37d91ce1a58ec308733292c0268bfd860984c21eb7377", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "dd945e6178236e2eee27b9de8e6d0b2ef9c6f905185a177676d608e42d81bebb", "variant": null }, "cpython-3.13.5-linux-x86_64_v2-musl": { @@ -6571,8 +6571,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "c32aee456cb150a8c105c213dc4afa8a409fba1aced890a4f58001ae70074922", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "af86120b3c3c48afdd512a798c1df2e01e7404875d5b54fc7bbde23f8b004265", "variant": null }, "cpython-3.13.5-linux-x86_64_v3-gnu": { @@ -6587,8 +6587,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f2d3a4aa566ce5a505a82357c766ccfc60f6bb4e255fab8725da2fbc28a199d3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c13783eae63223bced84ec976be9ad87d5b2ab3d9ba80c4f678520a4763410ba", "variant": null }, "cpython-3.13.5-linux-x86_64_v3-musl": { @@ -6603,8 +6603,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "f8d1c8f82a6cd694ca453e1c5e96e7415232be288a832b17bd5a4e9b7a5c09fe", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "5e7433fd471a8d2a5dfa9b062b3c1af108eef5958e74d123de963c5d018b3086", "variant": null }, "cpython-3.13.5-linux-x86_64_v4-gnu": { @@ -6619,8 +6619,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "a46b315e40f93ce673fb5ff9193c1f9dee550fe6f494fe1bba41885ef19ee094", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "14a4301952bf11ddf023e27ff5810963bf5a165946009f72c18bdd53f22450c0", "variant": null }, "cpython-3.13.5-linux-x86_64_v4-musl": { @@ -6635,8 +6635,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "4efeb9cd7c96f3b157478bb3037597b56334f14aad519eddc64da29849cc8031", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "583b793e600a9d55b941092de2f4f7426acaac7e7430ed9a36586f7a1754a8ea", "variant": null }, "cpython-3.13.5-windows-aarch64-none": { @@ -6651,8 +6651,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "22b73edc3afc256b58bb41b5a660aa835500781ef5b187de0c941748b1f38e3a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0e95119f5d018ec18bcf9ee57c91e13c9ffda2a5da5fa14f578498f8ec6e4ac0", "variant": null }, "cpython-3.13.5-windows-i686-none": { @@ -6667,8 +6667,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "fffdf2a1a16b9a24ef8489008a4a08927b202d7b79401913bbe1363e4180ad3a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "a877e912a7fc298e2b8ee349ed86bee00ac551232faebf258b790e334208f9d2", "variant": null }, "cpython-3.13.5-windows-x86_64-none": { @@ -6683,8 +6683,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "0871127fcf73c79479f36b2f34177565f6e97b87b4dd9cdafe4d6c37b54c153a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "bf9d014f24aa15f2ae37814e748773e395cbec111e368a91cdbcb4372bdff7c5", "variant": null }, "cpython-3.13.5+freethreaded-darwin-aarch64-none": { @@ -6699,8 +6699,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "b7764ec1b41a7018c67c83ce3c98f47b0eeac9c4039f3cd50b5bcde4e86bde96", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "61862be1c897fff1d5ec772be045d1af44846ffd4a6186247cc11e5e9ae3d247", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-darwin-x86_64-none": { @@ -6715,8 +6715,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "f15f0700b64fb3475c4dcc2a41540b47857da0c777544c10eb510f71f552e8ec", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "a51777a7a3d4b4860dd761dbcce85a8e9589031293a2f91f4a6a3679c3d0f5a8", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-aarch64-gnu": { @@ -6731,8 +6731,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "ced03b7ba62d2864df87ae86ecc50512fbfed66897602ae6f7aacbfb8d7eab38", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "e907a33d468de5f3936e73a0e6281a40307207acf62d59a34a1ef5a703816810", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-armv7-gnueabi": { @@ -6747,8 +6747,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", - "sha256": "0eafdd313352b0cda5cbfa872610cae8f47cfcba72da5a4267c7a1ef4dab8ccd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "fa495608f0bb7debc53a5d7e9bd10a328e7f087bba5b14203512902ead9e6142", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-armv7-gnueabihf": { @@ -6763,8 +6763,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", - "sha256": "1a7c93ed247a564836416cbb008837059fb4e66468d1770a9b2ba2d12a415450", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "5316526a325b72a7e6a75f5c0ba8f2f4d1cbab8c8f0516f76055f7a178666f21", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-powerpc64le-gnu": { @@ -6779,8 +6779,8 @@ "minor": 13, "patch": 5, "prerelease": "", - 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "9c943e130a9893c9f6f375c02b34c0b7e62d186d283fc7950d0ee20d7e2f6821", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "23770a0b9e176b8ca1bbbecd86029d4c9961fa8b88d0b0d584b14f0ad7a5dccc", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-riscv64-gnu": { @@ -6795,8 +6795,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "8075ed7b5f8c8a7c7c65563d2a1d5c20622a46416fb2e5b8d746592527472ea7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "0f111d4619843451a0edd13e145fc3b1ea44aecf8d7a92184dcd4a9ed0a063c4", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-s390x-gnu": { @@ -6811,8 +6811,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "a8dbcbe79f7603d82a3640dfd05f9dbff07264f14a6a9a616d277f19d113222c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "0a6df4acd93d29b0d94aa92fa46482f10bbcfe1b1e608e26909f608691c7f512", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64-gnu": { @@ -6827,8 +6827,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "e21a8d49749ffd40a439349f62fc59cb9e6424a22b40da0242bb8af6e964ba04", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "2c49314909be249c90071a54168f80d4cbf27ecbec7d464f8743d84427c5b7b1", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64-musl": { @@ -6843,8 +6843,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "625ae3e251cf7f310078f3f77bfdae8bbe3f1fe2c64f0d8c2c60939cb71b99d4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "e27a15c987d616763619413b2d7122d1f4ba66a66c564c2ab4a22fb1f95c826d", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu": { @@ -6859,8 +6859,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "7b9bc02fc1eb08ba78145946644fe81bc6353e2e28e74890ff93378daffa9547", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6882afc2e308561b8c1a23187c0439116434aae8573fd6e6dbdce60e3af79db5", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v2-musl": { @@ -6875,8 +6875,8 @@ "minor": 13, "patch": 5, "prerelease": "", - 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "4e163edf7e6a6a104f19213f3ad1b767f4d33a950ca8ea51f7b9ce04ba5a4c16", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "a8ef0d7a50a2616b2a1f8a5d7a3b52fa69085e6a75a6f7d3f318f7c132abfe16", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu": { @@ -6891,8 +6891,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "f1390326557df5562639bccaaaad4edcebf4e710696a2948b2aa00db2abdde5a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "ab2e44c83245d18226f1fce26b09218de866048ecb515b50b8174ba75c182b4e", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v3-musl": { @@ -6907,8 +6907,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "d5751f3b8af6d06e06a0ce5ea18307c1b6c38508b3879442c504eca3047d4ae2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "bad372bd5e38ff42064907b95273736137485ffdc6ff1d90b2e49f8df2829abb", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu": { @@ -6923,8 +6923,8 @@ "minor": 13, "patch": 5, "prerelease": 
"", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "88d8e7dfed818877158ede9b22342d9ce0fd3f49116954ca0eae7540e675d235", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "d12f4ecb61ae7ced3723173aa0a5ddaea395e098bfede57497426c65b5776b82", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v4-musl": { @@ -6939,8 +6939,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "2a6de48306f788910b33c54e1640d3b9fe29ccb3c44dcdc0b0ba6d6a89213d9e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "734233279cbab1f882f6e6b7d1a403695379aaba7473ba865b9741b860833076", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-windows-aarch64-none": { @@ -6955,8 +6955,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "accb608c75ba9d6487fa3c611e1b8038873675cb058423a23fa7e30fc849cf69", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "51d116a7f93654d602d7e503e3c7132ae4f10e5a8e8fbe7e2ceb9e550f11051a", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-windows-i686-none": { @@ -6971,8 +6971,8 @@ "minor": 13, "patch": 5, "prerelease": "", 
- "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "5cba33c38d25519b4c55a5b0015865771e604a2d331c7d335f52753b09d5b667", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "d4461149a95fd6d9c97d01afb42561c4b687d08526c84e8ff9658d26514450eb", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-windows-x86_64-none": { @@ -6987,8 +6987,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "75acd65c9a44afae432abfd83db648256ac89122f31e21a59310b0c373b147f1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "eb704f14608176fc8d8d8d08ca5b7e7de14c982b12cd447727bf79b1d2b72ac7", "variant": "freethreaded" }, "cpython-3.13.5+debug-linux-aarch64-gnu": { @@ -7003,8 +7003,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "bd73726128747a991d39bbc2c1a1792d97c6d2f4c7b6ed4b2db9254dd16d4ea6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "defdf6ddc233f8e97cc26afaa341651791c6085a59e02a1ab14cf8a981cdc7bf", "variant": "debug" }, "cpython-3.13.5+debug-linux-armv7-gnueabi": { @@ -7019,8 +7019,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "bbc1e704e4a2466cd52785e52f075e1b10ef5628879620b9461c6af2072e7036", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "69308c195ebc63543efa8f09fabb4a6fa2fc575019bd1afbc36c66858d2122c4", "variant": "debug" }, "cpython-3.13.5+debug-linux-armv7-gnueabihf": { @@ -7035,8 +7035,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "60389c2db232050357f24d7858ff019bb9cb37295465196275ec999e1d85f7db", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "ad3c911764e60a94c073c57361dc44ed1e04885652cabb1d1f3a1d11d466650d", "variant": "debug" }, "cpython-3.13.5+debug-linux-powerpc64le-gnu": { @@ -7051,8 +7051,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "e93c5832c3c6e39a2131d69de2e700bddab3a4f8bce74039e69276cec645f3a8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "bd91893c42edc3b23ee45df6fff77250dab8f94646bbdf2087c0a209231f210d", "variant": "debug" }, "cpython-3.13.5+debug-linux-riscv64-gnu": { @@ -7067,8 +7067,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6fb1da6dd6ccc40eea19062cb494f7cf0207c1e99a0a8cf9cae8fdc9cc30a4b6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7f3e649685358af0d78c8d7dcc4d357d5674e24aeaecbcc309ce83d5694821ce", "variant": "debug" }, "cpython-3.13.5+debug-linux-s390x-gnu": { @@ -7083,8 +7083,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a62a131ed07e9ef322ded45fb5257aa58502b10cb6e2a18298145838a041637b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fc013b0375c357286bf6886c0160c9a7fca774869c8a5896114ac1bf338f0b2e", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64-gnu": { @@ -7099,8 +7099,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a054dca4b204562ae34cd38f7b31ff53f035acd012310f9f7c8817eac9852db2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3502c7c36500fa1a84096f0e9c04dc036f3dbbae117d6b86d05b0a71a65e53cb", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64-musl": { @@ -7115,8 +7115,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - 
"sha256": "5da37b4623286ed7283277ec6288d0be88fcd3d208e98c075a140385734f0056", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "b42647c29dca10e55ceeaa10b6425f4ff851721376b4b9de82ce10c21da2b5f2", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v2-gnu": { @@ -7131,8 +7131,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "83faa4f0a92287a55887ef402bb138ca7aa46848afb7c9a30ebc337f8cb4b86c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5dee021b1e82ddeacae72fdee5ba6d2727faf1b39b8d4b9361a7961e5321c347", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v2-musl": { @@ -7147,8 +7147,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "8caaba837f778d2da1b041f15f0f46a3c117a531a55d6e79f5aaca836ecfb84f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "623e2fedb44f5c8c123371a9e82771792d1a64ea11cb963259947679c1bb7027", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v3-gnu": { @@ -7163,8 +7163,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "d4d2e746af77d16428d8168d11f8bf5b90424667949af7895413cdc18ebcaee8", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f24df9f31d052c4e9cabec7a897d78ceccf9fb90a6edaa6f4f128e49d5f27162", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v3-musl": { @@ -7179,8 +7179,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "f76628dc2447a1fc55f463623c81f9a19002b5f968afe77b57136fdc41833993", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "2821ef432b962ab4968e339f8d55a790eb64e266ccba674837589d58fb40f0d0", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v4-gnu": { @@ -7195,8 +7195,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "4620c454e6ae9ad0093785b54790ddb68c2d3f2d868aa79a5aa678b98e1138a3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8f9f953c202e0f6b5f7e7abff2b34beaff7a627d1f7ff8cdfe4d29f4fc12f067", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v4-musl": { @@ -7211,8 +7211,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.13.5%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "e1f4f398dadd9cd83e351ea08a068bc3ea24f870ccddbeb3b65ce65a3bc5c106", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "5c0740e8df7d69b4e2ead4f11db97e3d884e77377d84cbf6fba58077043388fb", "variant": "debug" }, "cpython-3.13.4-darwin-aarch64-none": { @@ -11435,8 +11435,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "3c948bee581f42c4a3b072a5e1ff261e0eb1636c00d5474c28a13fa627c95578", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0a5748a455ebd0ef0419bffa0b239c1596ea021937fa4c9eb3b8893cf7b46d48", "variant": null }, "cpython-3.12.11-darwin-x86_64-none": { @@ -11451,8 +11451,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "c81794121d513b7eab710a210202e78393400460251a6878c85b927977098b38", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1154b0be69bdd8c144272cee596181f096577d535bff1548f8df49e0d7d9c721", "variant": null }, "cpython-3.12.11-linux-aarch64-gnu": { @@ -11467,8 +11467,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "7ac6956ce9119a44531e9cbe3fe4d0beadcf244e02be81a863b95aa69041314f", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "73a22b9fa275682f326393df8f8afe82c302330e760bf9b4667378a3a98613ba", "variant": null }, "cpython-3.12.11-linux-armv7-gnueabi": { @@ -11483,8 +11483,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "4cc102db1b315425d2feda63407ee0e737902d94eaecf52e3ec8ea6f6d7cee4d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "6a60953cc821d673bf67724d05a430576d0921a60cfceeca11af5a758bd3ae71", "variant": null }, "cpython-3.12.11-linux-armv7-gnueabihf": { @@ -11499,8 +11499,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "c62d2c512b4e35dfb40d29246ed02cf0049e645bf333eca0a9e703da51f64597", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "1f8b03c8bf51f36f659961364f9d78a093af84305bbe416f95b5ecb64a11314d", "variant": null }, "cpython-3.12.11-linux-powerpc64le-gnu": { @@ -11515,8 +11515,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1025a919ad5170f76c58fb73f4b2b3a5e2ed910d1f802390f032b4da91152f23", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "10164c4c0e7f9a29024677226bc5f7c0b8b2b6ac5109a0d51a0fb7963f4bec48", "variant": null }, "cpython-3.12.11-linux-riscv64-gnu": { @@ -11531,8 +11531,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "0178724fd0ce4712092c2afb66094e12d1f7e07744cf9d0c462aad516a82b984", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f47a3ad7d96ba16b8b38f68f69296e0dca1e910b8ff9b89dd9e9309fab9aa379", "variant": null }, "cpython-3.12.11-linux-s390x-gnu": { @@ -11547,8 +11547,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "cb480b2fd0fefcdf71e07ab6a321e878bbc6d2c855356575db29fcbb48d5eae1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0714bccd13e1bfd7cce812255f4ba960b9ac5eb0a8b876daef7f8796dbd79c7a", "variant": null }, "cpython-3.12.11-linux-x86_64-gnu": { @@ -11563,8 +11563,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "50f2684ecd4dfdff732d091f0e3d383261a9d524a850784cd01a1c0839ece3e7", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e42c16fe50fda85dad3f5042b6d507476ea8e88c0f039018fef0680038d87c17", "variant": null }, "cpython-3.12.11-linux-x86_64-musl": { @@ -11579,8 +11579,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "f7ef6763b79a50da594fd1e03a6ee39017db6002c552539dbe0edffefc453804", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3676e47a82e674878b986a6ba05d5e2829cb8061bfda3c72258c232ad2a5c9f1", "variant": null }, "cpython-3.12.11-linux-x86_64_v2-gnu": { @@ -11595,8 +11595,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "32320209ab9b187b142a81bc4063c8aab9aa05ddb9833ca921c17eefdd2f1509", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ddf0c26a2df22156672e7476fda10845056d13d4b5223de6ba054d25bfcd9d3c", "variant": null }, "cpython-3.12.11-linux-x86_64_v2-musl": { @@ -11611,8 +11611,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "14abfef4e25db478db20dd15627576f47ff012a0eb3f7de3f9d1101ea409d02c", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "2be8e228b2a698b66f9d96819bcc6f31ac5bdc773f6ec6dbd917ab351d665da2", "variant": null }, "cpython-3.12.11-linux-x86_64_v3-gnu": { @@ -11627,8 +11627,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "4f71291857a656cf4b780d7c5bd2667ecde14f9ec093e026cf28d2c8727d69ad", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "820174fbb713495a1beecd087cc651d2d4f1d10b1bb2e308c61aecec006fea0a", "variant": null }, "cpython-3.12.11-linux-x86_64_v3-musl": { @@ -11643,8 +11643,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "b3530c771104b7765241b87b2ac749f6fce1886b4d2b677a1fc46aaca9378019", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "5cfc247d6ee2303c98fecddfbdf6ddd2e0d44c59a033cb47a3eb6ab4bd236933", "variant": null }, "cpython-3.12.11-linux-x86_64_v4-gnu": { @@ -11659,8 +11659,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "0de444c0e4ac45f2f4863889e57f2dbbe79f01593afcc21f63b4ddb5832edd61", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "01519be2a0930f86a43ac93f25fb0f44b3dbf8077ecd23c98c5b3011150ef16a", "variant": null }, "cpython-3.12.11-linux-x86_64_v4-musl": { @@ -11675,8 +11675,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "3d761bb79ef0946ee76b659c9bcf034dc8a67e1d414bef51ecb498c595a2b262", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "93a9714ef88ece8575707e1841369b753f9d320c42832efffda8df8dfcbd9ca7", "variant": null }, "cpython-3.12.11-windows-aarch64-none": { @@ -11691,8 +11691,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "c2b541cd75cd12d7b1d52ebee724cc1b1f4d7367901d06b2f3f4a2e3ded4145e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "512ae77ca0afe3a81d990c975548f052b9cde78187190eb5457b3b9cdad37a9c", "variant": null }, "cpython-3.12.11-windows-i686-none": { @@ -11707,8 +11707,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "cbf9c2bd5f182f6fc6da969729d0d4a5683d5f392f3a9bed3d7240cbe7385c11", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "c815e6eadc40013227269d4999d5aef856c4967e175beedadef60e429275be57", "variant": null }, "cpython-3.12.11-windows-x86_64-none": { @@ -11723,8 +11723,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "79e5d97e543975309fe3a22e27f2d83d7b08cff462d699bfa721854971773ec6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "49911a479230f9a0ad33fc6742229128249f695502360dab3f5fd9096585e9a5", "variant": null }, "cpython-3.12.11+debug-linux-aarch64-gnu": { @@ -11739,8 +11739,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a8d1e10b91253cf528c9233c314e6958de7d9380c5e949a2ce1b1b4dc8538ebd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "aed96d0c279ff78619991fadf2ef85539d9ca208f2204ea252d3197b82092e37", "variant": "debug" }, "cpython-3.12.11+debug-linux-armv7-gnueabi": { @@ -11755,8 +11755,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "46a11e0955ea444a0fe3fabbe9b1f36be4a72c804b8265d90f84f26a3de3199e", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "360e6b2b9bf34d8fb086c43f3b0ce95e7918a458b491c6d85bf2624ab7e75ae3", "variant": "debug" }, "cpython-3.12.11+debug-linux-armv7-gnueabihf": { @@ -11771,8 +11771,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "ccc5fbb01a83f1a264e90d8f92324c64d3dc2b2bdc4568340bb58dc62b061cce", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "fffb9b6c2e81b03aa8a1d8932a351da172cd6069bbdc192f020c8862d262eab5", "variant": "debug" }, "cpython-3.12.11+debug-linux-powerpc64le-gnu": { @@ -11787,8 +11787,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "0f334bbaa774e7b98f264e04456dfb6130519294ac0c25593cebb41c92571e34", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "a8bed95f73ccd6451cad69163ef7097bfc17eda984d2932a93e2dda639f06ff2", "variant": "debug" }, "cpython-3.12.11+debug-linux-riscv64-gnu": { @@ -11803,8 +11803,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "f048e364a7895b535c9e68f987cf17e3ee5f3bd3b7189b95cc7db30cd8a7b9b5", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "395d73e73ff0d0085ddb83f15d51375c655756e28b0e44c0266eb49f8d2b2f27", "variant": "debug" }, "cpython-3.12.11+debug-linux-s390x-gnu": { @@ -11819,8 +11819,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c48068f9f02f16314265567acb56e411e9936abc9b18c9d67811f5faade66031", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "097dc82abc3805b8e1721e67869fd4ae6419fb9089d7289aec4dd61b9c834db4", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64-gnu": { @@ -11835,8 +11835,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ef2fe47be6b147bc376ce8f2949cc3d193c9c1d2e362fa9dcbabf0e7c60f8a19", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d11f20d2adaa582ac3e3ab6f56a3c1f4e468e1aa4712d6fe76dd2776fdb28330", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64-musl": { @@ -11851,8 +11851,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "a88306d6b3a09b85f93514d43b2c8bd35dff417cf861bd2a1ead4d87c5666f8a", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a4cfaa4c7915c35ecf4a15a3f25cdda68b1e2de06280cfe98680b4eed3e11ac1", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v2-gnu": { @@ -11867,8 +11867,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b8637c81f61f41d49bf95699cc4c295579d671912f81b5446c3ba2496dac2627", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e040fa65666bd109534c8ed4c70d198954a28e87dffbab1b138a55c8c98c4db5", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v2-musl": { @@ -11883,8 +11883,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "5dab0c1eb4ce013826a462247629263eae7726b635d868408152444cbf83a778", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "89504b7f5fba85aa2644be63aa9377e69e56f6c6f4c57a96e0a6050e95e2b8d8", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v3-gnu": { @@ -11899,8 +11899,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "eef2733d40a9511a2af9d83808ad640993c5d8b6fb436bc240cd9bac6be4ffc5", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5eb9cb98d4528045f1e03373373ddb783fbbf6646e3d0e683fb563e5f1d198e6", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v3-musl": { @@ -11915,8 +11915,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "9f7fbd3712e13f91414e7a498a58160d8745fa02b9d2898db8f6f3c589920b6d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "0d463ebb5c0886e019c54e07963965ee53c52d01e42b3ca8a994e8599c2d7242", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v4-gnu": { @@ -11931,8 +11931,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "acf0037e25e80cbc3e8a1ff1e3b83da10ed2b00d8ff7df0ff1d207d896e2225f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "94924bb8ca1f03bf06c87554be2ea50ff8db47f2a3b02c5ff3b27d5a502d5fe4", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v4-musl": { @@ -11947,8 +11947,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.12.11%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "fefe36ed014e3a6baf0eb122161b42262c1a00ae403de18fb03353cf80d46c1f", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "47d315cae2b1cd67155cd072410e4a6c0f428e78f09bb5da9ff7eb08480c05c4", "variant": "debug" }, "cpython-3.12.10-darwin-aarch64-none": { @@ -15995,8 +15995,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "f35b94b5aaefaff34b59f4aab09a5eec02c93e3b61a46c6694f4e93fb2aea86c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "cb07230fc0946bab64762b2a97cca278c32c0fa4b1cf5c5c3eb848f08757498a", "variant": null }, "cpython-3.11.13-darwin-x86_64-none": { @@ -16011,8 +16011,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "c2a6b3053af4354d74b70d25ccf744bea7c545ee00da38a93e8b392ec9f062f1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1eec204b5dffad8a430c2380fd14895fad2b47406f6d69e07f00b954ffdb8064", "variant": null }, "cpython-3.11.13-linux-aarch64-gnu": { @@ -16027,8 +16027,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "a05521f2fa75e60920cb1172722920262c73d7ead3045a2a5b4844d287a1dfdd", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c5155a27d8e8df696eff8c39b1b37e5330f12a764fdf79b5f52ea2deb98a73a0", "variant": null }, "cpython-3.11.13-linux-armv7-gnueabi": { @@ -16043,8 +16043,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "a4bb388a080d1dc4a7d381d2bc7f74d00311d5fc6ef66d457178b5c62d7e0ac1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "680ecfd9fc09d62dbe68cfb201e567086e3df9a27d061d9bcde78fad4f7f4d94", "variant": null }, "cpython-3.11.13-linux-armv7-gnueabihf": { @@ -16059,8 +16059,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "80444ffb9f33d39a9462e2efa04ba7edbef6af2e957457a71a0710344972f0ba", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "af2508bfab6c90a28d7e271e9c1cede875769556f3537fc7b0e3b6dd1f1c92b7", "variant": null }, "cpython-3.11.13-linux-powerpc64le-gnu": { @@ -16075,8 +16075,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "40e5fcea272e4a8253cf2bc392fbad36ca4260de75a12ef3c95711eb86f57a0c", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c83b749e3908140dec9ffadbf6b3f98bacaf4ca2230ead6adbd8a0923eebf362", "variant": null }, "cpython-3.11.13-linux-riscv64-gnu": { @@ -16091,8 +16091,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "eae2bbaf28b1f5886408e6cae4c5d393f3065dbd3293231b93bd0122f5f0543d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7f0dfc489925e04ba015f170f4f30309330fae711d28bc4ed11ff13b9c3d9443", "variant": null }, "cpython-3.11.13-linux-s390x-gnu": { @@ -16107,8 +16107,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "702fd03db386a6711afbf14778a5b2aca6d4c3e47ff26e85a4d85991023ee0db", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "603e7bad4e81cee7d4c1c9ca3cb5573036fb1d226a9a9634ca0763120740d8ff", "variant": null }, "cpython-3.11.13-linux-x86_64-gnu": { @@ -16123,8 +16123,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f730f5d09fc41e2573b0092ef143dd8976a8f6593ad31b833ea1d0adbc5562dd", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e50197b0784baaf2d47c8c8773daa4600b2809330829565e9f31e6cfbc657eae", "variant": null }, "cpython-3.11.13-linux-x86_64-musl": { @@ -16139,8 +16139,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "337e164de474fefe5a2bf63c5d836093eae3532be80ed54b8d1abfd6dcb1b742", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a233b0492531f187ac33ecfd466debf21537a8b3ae90d799758808d74af09162", "variant": null }, "cpython-3.11.13-linux-x86_64_v2-gnu": { @@ -16155,8 +16155,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "3903459242e57e9979ca6e581c06f3e4c573cf1d3e2d3eb62ce2cba8e3d83fd9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5f970ce2eecd824c367132c4fd8d066a0af3d079e46acf972e672588a578b246", "variant": null }, "cpython-3.11.13-linux-x86_64_v2-musl": { @@ -16171,8 +16171,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "624494b5583fcec1f75464797686ffeb4727cf0ccdc54cf9c73f0b45888d5274", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a2df9657ecbecce2a50f8bb27cb8755d54c478195d49558de1c9c56f5de84033", "variant": null }, "cpython-3.11.13-linux-x86_64_v3-gnu": { @@ -16187,8 +16187,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "d5898a58943ed9f770a94125e7af85fbfd50b87e19135628708e8dbc6c8bd0b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c30fd4073a10ac6ee0b8719d106bb6195ca73b7f85340aac6e33069869ae4ee8", "variant": null }, "cpython-3.11.13-linux-x86_64_v3-musl": { @@ -16203,8 +16203,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "8bc18b17a9f8d36271dca160d402c18a42552b0e50708bf3732d0e2b1985235d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "cd15f24848c848b058a41dd0b05c4e5beca692d2c60c962fcb912fffc690afef", "variant": null }, "cpython-3.11.13-linux-x86_64_v4-gnu": { @@ -16219,8 +16219,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "257e29dc405d10062184da4078e1d46a787e19a04cba2a1c1831c21e52d0a557", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8c390cae0b2d163f18117cae43bcbe430e58146d97e0c39b4afe72842e55f5fc", "variant": null }, "cpython-3.11.13-linux-x86_64_v4-musl": { @@ -16235,8 +16235,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "4b7dec009dbdfb4821aebdb5ca082ac7765ecdb67980dc86adebd57febaf1aec", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "f2ac3addbdf3c08ccf2320bdbed20213b45acd3399d44a990046f09dd883824e", "variant": null }, "cpython-3.11.13-windows-aarch64-none": { @@ -16251,8 +16251,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "d45d2a6009dc50a76e4630c39ea36ba85e51555b7a17e1683d1bcf01c3bf7e1a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "84058f18798534e76f6b9d15b96c41116aad0055e01c6e3ab2ab02db24826b9a", "variant": null }, "cpython-3.11.13-windows-i686-none": { @@ -16267,8 +16267,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "892f215501ae1cfe36e210224f4de106e5825f34f41ad8d458ef73f3012be61f", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "8044a253950315481784b9f4764e1025b0d4a7a2760b7a82df849f4667113f80", "variant": null }, "cpython-3.11.13-windows-x86_64-none": { @@ -16283,8 +16283,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "d19baf214caf1ad3d1b34c6931dcd6d915abedd419ba4aecb0cacb7e1ec7884a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "43a574437fb7e11c439e13d84dd094fa25c741d32f9245c5ffc0e5f9523aafa9", "variant": null }, "cpython-3.11.13+debug-linux-aarch64-gnu": { @@ -16299,8 +16299,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "bf9e9c0295634d5ead7d3756651898d6af8d1bfdd8cc410769f9354d3e0871e4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b6ca253ced82c9575935a32d327d29dcffa9cb15963b9331c621ac91aa151933", "variant": "debug" }, "cpython-3.11.13+debug-linux-armv7-gnueabi": { @@ -16315,8 +16315,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "c0a5f208bbb1d51dfc3e98919f7856ae3a5643d2e6a6b5edfcbfa7ea41bb822e", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "3e02d8ff6b63bb83a9b4cbf428d75c90d06f79df211fa176d291f3864c1e77df", "variant": "debug" }, "cpython-3.11.13+debug-linux-armv7-gnueabihf": { @@ -16331,8 +16331,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "3d091a03c7d5fb47ac6050bffff371ce3904978ca3dc3c49f2bfacdc6b434a1d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "c7f9429f877d9e78a1b7e71c83b2beea38a727f239899ed325b3648e4e4cc1bf", "variant": "debug" }, "cpython-3.11.13+debug-linux-powerpc64le-gnu": { @@ -16347,8 +16347,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5c2be36a8aa027761b6c5da5bc4bb7ef92c6a8fa70a166f45fcc6f1c8b78330c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1f47dd100661489bf86befae148ce290009b91a7b62994f087136916ba4cfe4f", "variant": "debug" }, "cpython-3.11.13+debug-linux-riscv64-gnu": { @@ -16363,8 +16363,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "609cd34b0f86f576eec2e55a917d07e4d322e2c58309d6ae2243470207ed369b", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "47c5cae609e683e59bf6aff225c06216305b939374476a4cf796d65888a00436", "variant": "debug" }, "cpython-3.11.13+debug-linux-s390x-gnu": { @@ -16379,8 +16379,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "dd849e7e5308066f03d1f2be307cdfd95d5c815aec9dc743bf53c98731005cd5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7c16d22e0eeddfec0275f413ccca73c62ba55736230e889e5e78213e456bae1c", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64-gnu": { @@ -16395,8 +16395,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6ed2ab536fce32ba93ddf3ea572c92aee3a5c12575f9096defbab858011a9810", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "22b0309a7232568c054790a23979f490143c2a65f5b4638b52ebfa2e02ad7b20", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64-musl": { @@ -16411,8 +16411,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "a4df9df180fa29800467eef491b3d22019aec3eca8160f9babd27b24cf6ebf39", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "6a3c83db95e39a68ace7515787be03e77993f023bb0c908eaed4cf79480f24d4", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v2-gnu": { @@ -16427,8 +16427,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b27f28286c97e589521c496fe327e940c5ab99a406d652fe470008c2a525a159", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "0d7a5be35f70db94f151656a912fd66e0c001c515969007906b3f97c3fe46364", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v2-musl": { @@ -16443,8 +16443,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "9ffcf6f5b69805c47fb39c43810030cf1ff0fefab4b858734da75130f2184f7e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "7c4ae94fe3f488027f1a97f304ef4dbe2d83f4b97381b5d6dd5552ce01065027", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v3-gnu": { @@ -16459,8 +16459,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c546e8dc6d21eb9e3fc8a849b67fe5564ebd69456c800e1e9ba685a6450e1db3", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5fec7d7868079bd9107c190a3187d3bffe8e3a0214d09f8ce7fbe02788f6030d", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v3-musl": { @@ -16475,8 +16475,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "190734e9714c4041a160d50240a1e5489fd416091bb2f4f0ae1e17e46a67f641", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ac5f52aca1051354e336448634b8e544476198d1f8db73f0bcd6dff64267cf9e", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v4-gnu": { @@ -16491,8 +16491,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "629c39a382faed464041836b9299a2f3159e3cc5d07844f5cb5be8d579898166", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "467cee90b4081db0ddfef98e213bf9b69355068c2899853c7cf38bea44661fd5", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v4-musl": { @@ -16507,8 +16507,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.11.13%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "7909d1992f8bc7346b081f46a0d4c37e7ccabd041a947d89c17caa1cc497007b", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1ac6812cca22b1d3c70b932d5f6f6da0bc693a532e78132661f856bafcd40e2b", "variant": "debug" }, "cpython-3.11.12-darwin-aarch64-none": { @@ -20299,8 +20299,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "5076f23af532e6225b85106393a092c1e43c67605f5038a2687efe2608e999b0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "73939b9c93d50163cd0f1af8b3ce751c941a3a8d6eba9c08edcc9235dc5888c7", "variant": null }, "cpython-3.10.18-darwin-x86_64-none": { @@ -20315,8 +20315,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "8e9436c3aec957de1e79fd670b7c7801ad59f174a178a7e92964e4642ade8eda", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1ba1523d81d042a516068b98ded99d3490d3f4bb6c214fc468b62dadde88e5ac", "variant": null }, "cpython-3.10.18-linux-aarch64-gnu": { @@ -20331,8 +20331,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "9e7581dc4e6e75135650551040d1ad9529bb1b7b2b6c2dbf9b80483507284a50", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "54c490a7f22ac03171334e5265081ca90d75ca0525b154b001f0ee96ad961c18", "variant": null }, "cpython-3.10.18-linux-armv7-gnueabi": { @@ -20347,8 +20347,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "973db52fb00257045a4d3ea13c59c50588bc6f708b0a0230a2adb2154f710009", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "56ca1369651cb56221053676d206aa675ee91ddad5de71cb8de7e357f213ff59", "variant": null }, "cpython-3.10.18-linux-armv7-gnueabihf": { @@ -20363,8 +20363,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "87368650aa19e173da8b365231f75f1584f2d9e8b95d763b9c47f7fc053a644a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "eacff45758c90b3cdd4456a31b1217d665e122df8b5a0b8b238efcc59b8d8867", "variant": null }, "cpython-3.10.18-linux-powerpc64le-gnu": { @@ -20379,8 +20379,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "cc3079246949bcef9be0118f58e6713fc8af2ba49927db015bc6f4d8fca6ab26", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "6e4180591050ec321a76ac278f9eab9c80017136293ce965229f3cbea3a1a855", "variant": null }, "cpython-3.10.18-linux-riscv64-gnu": { @@ -20395,8 +20395,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "73c6d8cf8eb865595ef232f5bb7d7a55cb0c861e2ee72a6b23e61409010bf6ee", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ef176d45d3199989df3563e8a578fb00084190fa139ecc752debdee7d9acc77d", "variant": null }, "cpython-3.10.18-linux-s390x-gnu": { @@ -20411,8 +20411,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "668f8d911eec50bdd36996f3c0c098255fd90360e83d73efc383c136a93cbd30", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f744cbebf0cc0236fd234aa99ae799105ed2edb0a01cf3fe9991d6dd85bd157c", "variant": null }, "cpython-3.10.18-linux-x86_64-gnu": { @@ -20427,8 +20427,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c6e79f2c78b893339c4fbb4f337647f5e14d491ca2c05ecec8f78187bfd9480c", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ba282bc7e494c38c7f5483437fd1108e1d55f0b24effb3eb5b28e03966667d7c", "variant": null }, "cpython-3.10.18-linux-x86_64-musl": { @@ -20443,8 +20443,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "cb6f4ea6cb5eef904d5a8fb4bcfee77bc34bca4946f8a12bab70c103f503f676", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "0502186e5ccc85134a2c7d11913198eb5319477da1702deb5d4b89c3f692b166", "variant": null }, "cpython-3.10.18-linux-x86_64_v2-gnu": { @@ -20459,8 +20459,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "dbc05eadb1cdf504718688bb29367ab16fc0868c3b873031ea49b85e919a3bee", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ddd7ff4a13131c29011dd508d2f398c95977dc5c055be891835a3aa12df7acfa", "variant": null }, "cpython-3.10.18-linux-x86_64_v2-musl": { @@ -20475,8 +20475,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "5c7ac0653d42d1ab391fec12c1f1f1d940c7ebe20013979d91d4651c3fcb62b9", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "feb3d0c6ddfa959948321d6ac3de32d5cde32fe50135862c65165c9415cafedf", "variant": null }, "cpython-3.10.18-linux-x86_64_v3-gnu": { @@ -20491,8 +20491,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1199924aba81e7475479b9e709e91f5cbb5cf3dc269cc0c30c27cf25cbfe8f01", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "69c634bf5c979ca3d6fac7e5a34613915e55fc6671bfb0dee7470f3960a649ee", "variant": null }, "cpython-3.10.18-linux-x86_64_v3-musl": { @@ -20507,8 +20507,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "66a78c15f1f2cd0cfd0196edf323bdffe77481e6904751e125d4db23db78bad0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "dbe2e101bb60277ef0f9354b7f0b1aaa85b07dec3a12ca72ae133baa080deeca", "variant": null }, "cpython-3.10.18-linux-x86_64_v4-gnu": { @@ -20523,8 +20523,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "7c0aaa49f3a5b15689ae43d6cd4f418732ee95070aaa96dabf968bb3ac45b29e", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a6b2530a580061eb9d08168ac5e8808b8df1d2e7b8dd683c424b59cc9124a3a2", "variant": null }, "cpython-3.10.18-linux-x86_64_v4-musl": { @@ -20539,8 +20539,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "b14649f4bdb22cf8b2c3656034687b9854f0ad0489018a65a1d44e886a000e96", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3a2abc86a8e740d4e7dddcd697781630d9d9e6ce538095b43a4789a531f8239b", "variant": null }, "cpython-3.10.18-windows-i686-none": { @@ -20555,8 +20555,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "e633c5093644502c477ba2391bde9bf23fb5d695aaa7de0e727b363592d81edf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "1326fb50a7f39ff80b338a95c47acbeda30f484ee28ff168c3e395320345ee01", "variant": null }, "cpython-3.10.18-windows-x86_64-none": { @@ -20571,8 +20571,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "9b168333744e676d221d0e47b73328e38a78a080bbeff009db72d0eae201a3a7", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0dec10054eefa76d4e47e8f53d9993e51a6d76252d9f8e5162b1b9805e6ffc20", "variant": null }, "cpython-3.10.18+debug-linux-aarch64-gnu": { @@ -20587,8 +20587,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "2997824229577882eb7f0000118c93d0fb12f97bee10bd7c41ed46b7123c6d5d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ed4d68544efef0d7c158c4464d8e3b4407a02e2ea014e76dfa65fddfd49384af", "variant": "debug" }, "cpython-3.10.18+debug-linux-armv7-gnueabi": { @@ -20603,8 +20603,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "5650962a60d540d9a71b6af917f78386ae69f4368f9b3537828b8368400aee8f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "39fdc60b2645262ef658ebbf5edfaffd655524855d3aa35bfb05a149a271e4f5", "variant": "debug" }, "cpython-3.10.18+debug-linux-armv7-gnueabihf": { @@ -20619,8 +20619,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "891540ab2a6e2534115787c95e06111176c2630dc261bad2169251924ec41fc6", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "cf0c02ab4b46c9b6a0854e5bd9da9b322d8d91ae5803190b798ff15cb25ab153", "variant": "debug" }, "cpython-3.10.18+debug-linux-powerpc64le-gnu": { @@ -20635,8 +20635,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "7266278b47151f48b7b57790cda43aeb12bb1a776711fbb552a60ace2d9e68fc", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e9f346d7fa001e85cea92cf027b924c2095d54f7db297287b2df550f04e6c304", "variant": "debug" }, "cpython-3.10.18+debug-linux-riscv64-gnu": { @@ -20651,8 +20651,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ba07bece860b8f98da3740860f4e91de18d0e05a30f1970203f0d5f98489210c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c11eba8055c7bb643f55694fb1828d8d13e4ade2cb3ec60d8d9bb38fbf7500d8", "variant": "debug" }, "cpython-3.10.18+debug-linux-s390x-gnu": { @@ -20667,8 +20667,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "217a35c1c9ef9bfef37970587245ce06c3e63f92322b083e0baa7da2a82587cf", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c7b407062dc86e011c2e3d8f5f0e1db8d8eac3124e4d0b597f561d7f7b2a8723", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64-gnu": { @@ -20683,8 +20683,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1d485c1882d0ecefe858ef8db3864fb6b91a938941f3d7350c06f3b6a03734db", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1ba2a0159629d92207966cbf2038774afd0f78cc59e94efb8a86e88a32563bdd", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64-musl": { @@ -20699,8 +20699,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "cdbead37d85fff493e6eb3e6adf3d6935a721315b4711666db56d157e796396b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ebee02e3380e50e394962697dc4d4c845f60ac356da88f671be563ef0dafaa9b", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v2-gnu": { @@ -20715,8 +20715,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5ae93dac6ae65c7f13c355ce1fe28b78a0a9b272c428bb27f5dbf2a357275bc2", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4de984931af2c4a2b18139ff123843671c5037900524065c2fef26ff3d1a5771", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v2-musl": { @@ -20731,8 +20731,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "a588754cd0e959123c5beedd1d50cc849f8c3bed4908174a6f55730951a10241", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "fd97d5565e0fb98ad78db65f107789e287f84c53f4d9f3ccb37fdd5f3849288b", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v3-gnu": { @@ -20747,8 +20747,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "65976255591b39e428ae750050e398521a32bcdefb96053dd2cf9007165411da", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ea450da681ab3fdef0da5181d90ebff7331ce1f7f827bb3b56657badc4127fad", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v3-musl": { @@ -20763,8 +20763,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "fc8ba366396b3e6b5aca7e3ba449ad094350a533f31a0c99c6ed1ac0d41ef7d2", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ff9fe8b880460ce9529db369e2becca20a7e6a042df2deba2277e35c5cdcd35a", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v4-gnu": { @@ -20779,8 +20779,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "2bf6024c48b82b667dc3bab77d9ff143ac3983e75be94c32cdc22b9cd7e50d15", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c1a1d9661cf1d45096478fefd1e70ff6d0cbc419194cf094414d24fa336f5116", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v4-musl": { @@ -20795,8 +20795,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.10.18%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "41696205b706ea5b0ef89eefd695bfe87f44dae57f9318711892b1ceb144cff7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "2bf809a85ffc45a37b32d5107f1a3ee8a6d12f07bb5fd3ad26ba16501418a8a7", "variant": "debug" }, "cpython-3.10.17-darwin-aarch64-none": { @@ -25739,8 +25739,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "aff1156fa5be26caf1ac2d4029936eb9379dc4351bb1d32d2120b10f2ba61747", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "3ab0d1885fee62dadc1123f0b23814e51b6abe5dcf6182a0c9af6cfc69764741", "variant": null }, "cpython-3.9.23-darwin-x86_64-none": { @@ -25755,8 +25755,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "9de5325065b159e3e7daa53c133126df6b3eeed2316176d84e7761b01d16ba7f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0fbb8bcc5d203b83ba1e63f9b8b1debe9162c22dd0f7481543f310b298255d6a", "variant": null }, "cpython-3.9.23-linux-aarch64-gnu": { @@ -25771,8 +25771,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "51fe6b026253b9f9c83205d1907572d7618ea47216e40a351d30eaa55f879c3e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "da2e4a73d7318241031d87da2acb7da99070f94d715b8c9f8c973a5d586b20a6", "variant": null }, "cpython-3.9.23-linux-armv7-gnueabi": { @@ -25787,8 +25787,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "1faeec85e15cd17acb90683bc42cc8bccdb5250816501863d3407713deb6215e", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "41599a37d0f6fa48b44183d15a7c98a299839b83fa28774ff3f01d28500da9a6", "variant": null }, "cpython-3.9.23-linux-armv7-gnueabihf": { @@ -25803,8 +25803,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "08261e7a2328c989409a7f0f4574bfca84adfab7e5db6556209642ebba55de5e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "2263daa7d9cda3e53449091dc86aa7931409721031bad1a1a160b214777c5cd6", "variant": null }, "cpython-3.9.23-linux-powerpc64le-gnu": { @@ -25819,8 +25819,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2ab4c6c616b23b2220829420028f90d0aa4f767ae60fcdf5d2edff08644bb5af", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "fc068ac5cf5e4effc74e2b63e34c2618e5a838737a19ca8f7f17cc2f10e44f26", "variant": null }, "cpython-3.9.23-linux-riscv64-gnu": { @@ -25835,8 +25835,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "bea6c21421b016ca03e786f0fb91a03cc9d3f39aa8069785632efe3666e90df5", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5475f1106abed1b1163fa7964f8f8e834cbdafc26ddb9ab79cc5c10fb8110457", "variant": null }, "cpython-3.9.23-linux-s390x-gnu": { @@ -25851,8 +25851,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6715a5b8af51e76929c1f7a81c9085053243d2b4025bac29f8ec18301766d795", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2d571c79b0722488b4980badb163ebd83e48b02b5a125239c67239df8dd37476", "variant": null }, "cpython-3.9.23-linux-x86_64-gnu": { @@ -25867,8 +25867,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ad39b79d0168f0f7cc5dbe14d99ff8d1068077f15cc2b03456fe3364630157e8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7932256affbd8fe7e055fb54715dae47e4557919bfe84bb8f33260a7a792633a", "variant": null }, "cpython-3.9.23-linux-x86_64-musl": { @@ -25883,8 +25883,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "977af02740232123c385e7f8e70eb8acdcf8ffd4126526f9d3d8cb1bd20fd669", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "64c4bb8c76b50f264a6900f3391156efd0c39ad75447f1b561aa0b150069e361", "variant": null }, "cpython-3.9.23-linux-x86_64_v2-gnu": { @@ -25899,8 +25899,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ffbb92f9213591ab7b253c89d34218c3adab25327668b89bc6120038cc2b0a37", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c2bdab1548c60ed0bda4c69bea6dd17569c1d681065ed5ec5395175ed165f47a", "variant": null }, "cpython-3.9.23-linux-x86_64_v2-musl": { @@ -25915,8 +25915,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "e53121074856e6ef4e8f3a865c2848d4287431a1d0ceef21fd389cc39649f917", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "61b59f2c19575acd088e1d63ca95e810e8e2b1af20f37d7acebf90f864c22ca4", "variant": null }, "cpython-3.9.23-linux-x86_64_v3-gnu": { @@ -25931,8 +25931,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1856f202d42555e8e8709db0291bbfac5a896724734314746ef20c014cca8552", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f791037703a7370783c853bb406034532599ff561dfbf5bc67d44323d131b3c3", "variant": null }, "cpython-3.9.23-linux-x86_64_v3-musl": { @@ -25947,8 +25947,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "94f94fa20477b5088a147936c565c2b0a5a18e353d954ad6bbd5048e933d9a67", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "88c3ad43158942c232039752e4d269cd89e282795e4c7f863f76f3e307b852f4", "variant": null }, "cpython-3.9.23-linux-x86_64_v4-gnu": { @@ -25963,8 +25963,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "55209fe80fac7837837c5b4d310e71e1de822ca413465bf7589fabae5dd9ba7a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0a71dcb46a9ff949f7672f65090d210ee79d80846f10629e3f234eb7f5fe58e8", "variant": null }, "cpython-3.9.23-linux-x86_64_v4-musl": { @@ -25979,8 +25979,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "41e1237774abf02a8c3b33c365d959ba8529f6a845d93789e3fe7ba4203fb8c2", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "cd574a9a36a729aa964e1c52bb3084a36350d905c4d16427d85dd3f80e1b3dcd", "variant": null }, "cpython-3.9.23-windows-i686-none": { @@ -25995,8 +25995,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "f8d558d6d260cc970f02e04f5b6555acd5148b1b2bef25d2c945ab2b8dfd3ce2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "f5b6a6185ed80463160cbd95e520d8d741873736d816ac314d3e08d61f4df222", "variant": null }, "cpython-3.9.23-windows-x86_64-none": { @@ -26011,8 +26011,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "3a150e1126b1b7645a95ba06992d886cd03dab524d7c2660bd94bcf51f499fa1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "a8f80f8da7901fba2b271cdc5351a79b3d12fd95ee50cc4fe78410dc693eb150", "variant": null }, "cpython-3.9.23+debug-linux-aarch64-gnu": { @@ -26027,8 +26027,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "51cfb2db5abdd1e10d2998289fbf3235352a61b4b6a3ef8ac4fbf4252ae09c78", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c00ba3d83356c187e39c9d6b1541733299a675663690dc1b49c62a152d2db191", "variant": "debug" }, "cpython-3.9.23+debug-linux-armv7-gnueabi": { @@ -26043,8 +26043,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "369a0f68be191dbb45a3ca173c9589d77f973be3552f08225d03f5e013795d25", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "eb4875c6220036fd1b40af4d885823057122d61fc60f0b2c364065259adad0cc", "variant": "debug" }, "cpython-3.9.23+debug-linux-armv7-gnueabihf": { @@ -26059,8 +26059,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "0821af742c0187823ae3194c53b7590e7bf0524a14b94580300391e0b13bdd8a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "eca68cac8c0880f08de5c1bcae91ff0bd7fe64e5788a433fc182a5e037af671c", "variant": "debug" }, "cpython-3.9.23+debug-linux-powerpc64le-gnu": { @@ -26075,8 +26075,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "45525a2d123981cb56f5fe4cd87e9bbe18c3fffe6b778313e8ef76f864315513", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5ffc8d84b6098cfa5e2e3aaedcc3e130809d5caa1958d5155995ed3df15d8cc7", "variant": "debug" }, "cpython-3.9.23+debug-linux-riscv64-gnu": { @@ -26091,8 +26091,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "9280d5f805d1f1ff992657af852a343f90cdaf7ef40287b55f48a73e409a4fe3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d7f38d5539d7a0b15ce6071ba3290ce1a4ac2da3bd490d023b4d7b36c6c33c89", "variant": "debug" }, "cpython-3.9.23+debug-linux-s390x-gnu": { @@ -26107,8 +26107,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "340c153709d2d428d0604802983bd017079ea95f48ccbb8877e08c87b8c93f4f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "14250195a8c4c42fa9b22e7ca70ac5be3fe5e0ca81239c0672043eddeb6bb96e", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64-gnu": { @@ -26123,8 +26123,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "e63909ea5cf383db126d5af9c3ba09fc68868104cf8db265723ad1220a5fafae", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "846ad94f04ca8413762e6cfaee752156bbaa75f3ec030bcc235453f708e3577c", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64-musl": { @@ -26139,8 +26139,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "1f58c434a2772e136506e517e412cc450359807a32742064d9ef3ec18ae1ef3e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "4ef30683e0dd6a08a6ef591ab37a218baa42a7352f5c3951131538ab0ef83865", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v2-gnu": { @@ -26155,8 +26155,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6702268ce25da3f547ed1f48ee20144d0cdc1db967a467f25d097f43cb52a25e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8964daf898c112bc5caa9499e8d1ba4c0d82911b4c3e07044c7f5abf489b97c6", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v2-musl": { @@ -26171,8 +26171,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "606eeb49821a06fb874527494f6493606e5f837cf56dba8235e75149ec53297b", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "868f2f3e994992a1b68eb051fa2678a2e57bbbe1fcfc9f48461b0d2d87c5b6a8", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v3-gnu": { @@ -26187,8 +26187,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "11dcf8d92a18e609f32750ceb758a65855505a79907302142c8b70785c5c9a03", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1616c6f535b6edf4160ee97b9beca8146f9cd77a4de8c240a0a3f095a09795e9", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v3-musl": { @@ -26203,8 +26203,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "d246a1a69cee5ec4bf467fb1ea42f6218925d3047afd3817b34fc3f8ad199200", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1f9d7987734042d04badc60686f5503eb373ea8b7b7f3ade6a58a37f7d808265", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v4-gnu": { @@ -26219,8 +26219,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "05b81fde271d35e97d5e411a2d9e232baa424a55c8ea6e09a15e1606c08833f4", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4b8f925b20b6b74c1eb48fa869ee79cde20745fb93c83776e5c71924448e7e53", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v4-musl": { @@ -26235,8 +26235,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250708/cpython-3.9.23%2B20250708-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "33e7411e88033865e8a4e9c995112cb3867f284102624b3ce1dbcdb4f4c03ea3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ecab1905698e5dd4a11c46a1dc6be49cf0e37f70b81191adbb7dad6e453906cb", "variant": "debug" }, "cpython-3.9.22-darwin-aarch64-none": { From 4175e3eb4d7e484004d3eba6f0ecaded5810e03a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 13 Jul 2025 08:20:51 -0500 Subject: [PATCH 015/130] Sync latest Python releases (#14581) Automated update for Python releases. 
Co-authored-by: zanieb <2586601+zanieb@users.noreply.github.com> --- crates/uv-dev/src/generate_sysconfig_mappings.rs | 4 ++-- crates/uv-python/src/sysconfig/generated_mappings.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/uv-dev/src/generate_sysconfig_mappings.rs b/crates/uv-dev/src/generate_sysconfig_mappings.rs index b9f58dd92..8357ee7fb 100644 --- a/crates/uv-dev/src/generate_sysconfig_mappings.rs +++ b/crates/uv-dev/src/generate_sysconfig_mappings.rs @@ -11,7 +11,7 @@ use crate::ROOT_DIR; use crate::generate_all::Mode; /// Contains current supported targets -const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250708/cpython-unix/targets.yml"; +const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250712/cpython-unix/targets.yml"; #[derive(clap::Args)] pub(crate) struct Args { @@ -130,7 +130,7 @@ async fn generate() -> Result { output.push_str("//! DO NOT EDIT\n"); output.push_str("//!\n"); output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n"); - output.push_str("//! Targets from \n"); + output.push_str("//! Targets from \n"); output.push_str("//!\n"); // Disable clippy/fmt diff --git a/crates/uv-python/src/sysconfig/generated_mappings.rs b/crates/uv-python/src/sysconfig/generated_mappings.rs index 54170aba5..646501b07 100644 --- a/crates/uv-python/src/sysconfig/generated_mappings.rs +++ b/crates/uv-python/src/sysconfig/generated_mappings.rs @@ -1,7 +1,7 @@ //! DO NOT EDIT //! //! Generated with `cargo run dev generate-sysconfig-metadata` -//! Targets from +//! Targets from //! 
#![allow(clippy::all)] #![cfg_attr(any(), rustfmt::skip)] From a57241c0d7b7412a3f2f7c9fadbb0bf55c50daea Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 13 Jul 2025 21:10:28 -0400 Subject: [PATCH 016/130] Update pre-commit hook astral-sh/ruff-pre-commit to v0.12.3 (#14592) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1c8965c0f..5476c9dc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,7 +42,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.12.2 + rev: v0.12.3 hooks: - id: ruff-format - id: ruff From e9509fde84ea84ea8d588c5a47eff1810e6b2f43 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 15:42:56 +0200 Subject: [PATCH 017/130] Update Rust crate clap to v4.5.41 (#14593) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.40` -> `4.5.41` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    clap-rs/clap (clap) ### [`v4.5.41`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4541---2025-07-09) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.40...v4.5.41) ##### Features - Add `Styles::context` and `Styles::context_value` to customize the styling of `[default: value]` like notes in the `--help`
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bc42e30af..4079390e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -619,9 +619,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" dependencies = [ "clap_builder", "clap_derive", @@ -629,9 +629,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" dependencies = [ "anstream", "anstyle", @@ -672,9 +672,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" +checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491" dependencies = [ "heck", 
"proc-macro2", @@ -761,7 +761,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] @@ -1138,7 +1138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1966,7 +1966,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi 0.4.0", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -2026,7 +2026,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -2885,7 +2885,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3317,7 +3317,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3330,7 +3330,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.9.2", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3913,7 +3913,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix 1.0.7", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -6293,7 +6293,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] From 4c40dd341e68dfd69073cb4e7cc36cf1dcdb04c7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 15:45:56 +0200 Subject: [PATCH 018/130] Update Rust crate hyper-util to v0.1.15 (#14595) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [hyper-util](https://hyper.rs) ([source](https://redirect.github.com/hyperium/hyper-util)) | dev-dependencies | patch | `0.1.14` -> `0.1.15` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    hyperium/hyper-util (hyper-util) ### [`v0.1.15`](https://redirect.github.com/hyperium/hyper-util/blob/HEAD/CHANGELOG.md#0115-2025-07-07) [Compare Source](https://redirect.github.com/hyperium/hyper-util/compare/v0.1.14...v0.1.15) - Add header casing options to `auto::Builder`. - Fix `proxy::Socksv5` to check for enough bytes before parsing ipv6 responses. - Fix including `client-proxy` in the `full` feature set.
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4079390e9..f2ef0b4a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1703,9 +1703,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" +checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df" dependencies = [ "base64 0.22.1", "bytes", From ef7ab76206e96bbc531a5ee9c065f6a2462cb651 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 15:48:47 +0200 Subject: [PATCH 019/130] Update Rust crate codspeed-criterion-compat to v3.0.3 (#14594) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [codspeed-criterion-compat](https://codspeed.io) ([source](https://redirect.github.com/CodSpeedHQ/codspeed-rust)) | dependencies | patch | `3.0.2` -> `3.0.3` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    CodSpeedHQ/codspeed-rust (codspeed-criterion-compat) ### [`v3.0.3`](https://redirect.github.com/CodSpeedHQ/codspeed-rust/releases/tag/v3.0.3) [Compare Source](https://redirect.github.com/CodSpeedHQ/codspeed-rust/compare/v3.0.2...v3.0.3) #### What's Changed - tests: cargo-bench should work with the compat layers by [@​art049](https://redirect.github.com/art049) in [https://github.com/CodSpeedHQ/codspeed-rust/pull/110](https://redirect.github.com/CodSpeedHQ/codspeed-rust/pull/110) - fix: handle rustflags from .cargo/config.toml by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [https://github.com/CodSpeedHQ/codspeed-rust/pull/109](https://redirect.github.com/CodSpeedHQ/codspeed-rust/pull/109) **Full Changelog**: https://github.com/CodSpeedHQ/codspeed-rust/compare/v3.0.2...v3.0.3
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f2ef0b4a3..53bd78f42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -690,9 +690,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "codspeed" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf" +checksum = "a7524e02ff6173bc143d9abc01b518711b77addb60de871bbe5686843f88fb48" dependencies = [ "anyhow", "bincode", @@ -708,9 +708,9 @@ dependencies = [ [[package]] name = "codspeed-criterion-compat" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a" +checksum = "2f71662331c4f854131a42b95055f3f8cbca53640348985f699635b1f96d8c26" dependencies = [ "codspeed", "codspeed-criterion-compat-walltime", @@ -719,9 +719,9 @@ dependencies = [ [[package]] name = "codspeed-criterion-compat-walltime" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64" +checksum = 
"e3c9bd9e895e0aa263d139a8b5f58a4ea4abb86d5982ec7f58d3c7b8465c1e01" dependencies = [ "anes", "cast", From d179c496ddbbc2c8434608dc60da1bbd9f18c30b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 15:49:11 +0200 Subject: [PATCH 020/130] Update Rust crate spdx to v0.10.9 (#14596) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [spdx](https://redirect.github.com/EmbarkStudios/spdx) | workspace.dependencies | patch | `0.10.8` -> `0.10.9` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    EmbarkStudios/spdx (spdx) ### [`v0.10.9`](https://redirect.github.com/EmbarkStudios/spdx/blob/HEAD/CHANGELOG.md#0109---2025-07-12) [Compare Source](https://redirect.github.com/EmbarkStudios/spdx/compare/0.10.8...0.10.9) ##### Changed - [PR#76](https://redirect.github.com/EmbarkStudios/spdx/pull/76) update SPDX license list to 3.27.0.
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 53bd78f42..5dbdfaf65 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3729,9 +3729,9 @@ dependencies = [ [[package]] name = "spdx" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" dependencies = [ "smallvec", ] From 9efd053d27882cdb763057f368b5f85b7448af67 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Mon, 14 Jul 2025 08:56:39 -0500 Subject: [PATCH 021/130] Add test case for `uv tool` Python re-resolves (#14605) A test case for https://github.com/astral-sh/uv/pull/10401 and https://github.com/astral-sh/uv/pull/14606 --- crates/uv/tests/it/tool_run.rs | 70 ++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs index fb6287454..8bcf5c3d1 100644 --- a/crates/uv/tests/it/tool_run.rs +++ b/crates/uv/tests/it/tool_run.rs @@ -2975,3 +2975,73 @@ fn tool_run_windows_runnable_types() -> anyhow::Result<()> { Ok(()) } + +#[test] +fn tool_run_reresolve_python() -> anyhow::Result<()> { + let context 
= TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_counts(); + let tool_dir = context.temp_dir.child("tools"); + let bin_dir = context.temp_dir.child("bin"); + let foo_dir = context.temp_dir.child("foo"); + let foo_pyproject_toml = foo_dir.child("pyproject.toml"); + + foo_pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.12" + dependencies = [] + + [project.scripts] + foo = "foo:run" + "# + })?; + let foo_project_src = foo_dir.child("src"); + let foo_module = foo_project_src.child("foo"); + let foo_init = foo_module.child("__init__.py"); + foo_init.write_str(indoc! { r#" + import sys + + def run(): + print(".".join(str(key) for key in sys.version_info[:2])) + "# + })?; + + // Although 3.11 is first on the path, we'll re-resolve with 3.12 because the `requires-python` + // is not compatible with 3.11. + uv_snapshot!(context.filters(), context.tool_run() + .arg("--from") + .arg("./foo") + .arg("foo") + .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) + .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + 3.12 + + ----- stderr ----- + Resolved [N] packages in [TIME] + Prepared [N] packages in [TIME] + Installed [N] packages in [TIME] + + foo==1.0.0 (from file://[TEMP_DIR]/foo) + "); + + uv_snapshot!(context.filters(), context.tool_run() + .arg("--from") + .arg("./foo") + .arg("--python") + .arg("3.11") + .arg("foo") + .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) + .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + 3.12 + + ----- stderr ----- + Resolved [N] packages in [TIME] + "); + + Ok(()) +} From 3b050b554519f2bbe94a5987aaa87d097dfca5b5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 09:58:55 -0400 Subject: [PATCH 022/130] Update Rust crate tokio to v1.46.1 (#14599) --- Cargo.lock | 17 +++++++++++++++-- 1 
file changed, 15 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5dbdfaf65..8232c6905 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1942,6 +1942,17 @@ dependencies = [ "similar", ] +[[package]] +name = "io-uring" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "libc", +] + [[package]] name = "ipnet" version = "2.11.0" @@ -4134,17 +4145,19 @@ source = "git+https://github.com/astral-sh/tl.git?rev=6e25b2ee2513d75385101a8ff9 [[package]] name = "tokio" -version = "1.45.1" +version = "1.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" +checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" dependencies = [ "backtrace", "bytes", + "io-uring", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", + "slab", "socket2", "tokio-macros", "windows-sys 0.52.0", From 0af025eafbf24215898628d7682f8c787356868d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 08:59:09 -0500 Subject: [PATCH 023/130] Update CodSpeedHQ/action action to v3.7.0 (#14597) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [CodSpeedHQ/action](https://redirect.github.com/CodSpeedHQ/action) | action | minor | `v3.5.0` -> `v3.7.0` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    CodSpeedHQ/action (CodSpeedHQ/action) ### [`v3.7.0`](https://redirect.github.com/CodSpeedHQ/action/releases/tag/v3.7.0) [Compare Source](https://redirect.github.com/CodSpeedHQ/action/compare/v3.6.1...v3.7.0) #### What's Changed ##### 🚀 Features - Add pre- and post-benchmark scripts by [@​not-matthias](https://redirect.github.com/not-matthias) - Add cli args for perf by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​94](https://redirect.github.com/CodSpeedHQ/runner/pull/94) ##### 🐛 Bug Fixes - Forward environment to systemd-run cmd by [@​not-matthias](https://redirect.github.com/not-matthias) - Only panic in upload for non-existing integration by [@​not-matthias](https://redirect.github.com/not-matthias) - Multi-line commands in valgrind by [@​not-matthias](https://redirect.github.com/not-matthias) - Symlink libpython doesn't work for statically linked python by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​89](https://redirect.github.com/CodSpeedHQ/runner/pull/89) - Run perf with sudo; support systemd-run for non-perf walltime by [@​not-matthias](https://redirect.github.com/not-matthias) - Use correct path for unwind info by [@​not-matthias](https://redirect.github.com/not-matthias) ##### ⚙️ Internals - Add executor tests by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​95](https://redirect.github.com/CodSpeedHQ/runner/pull/95) - Add log to detect invalid origin url by [@​not-matthias](https://redirect.github.com/not-matthias) - Upgrade to edition 2024 by [@​not-matthias](https://redirect.github.com/not-matthias) - Add debug logs for proc maps by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​88](https://redirect.github.com/CodSpeedHQ/runner/pull/88) - Enhance version resolution with 'latest' support and flexible formats by [@​art049](https://redirect.github.com/art049) in [https://github.com/CodSpeedHQ/action/pull/132](https://redirect.github.com/CodSpeedHQ/action/pull/132) **Full 
Changelog**: https://github.com/CodSpeedHQ/action/compare/v3.6.1...v3.7.0 **Full Runner Changelog**: https://github.com/CodSpeedHQ/runner/blob/main/CHANGELOG.md ### [`v3.6.1`](https://redirect.github.com/CodSpeedHQ/action/releases/tag/v3.6.1) [Compare Source](https://redirect.github.com/CodSpeedHQ/action/compare/v3.5.0...v3.6.1) ##### What's Changed ##### 🚀 Features - Allow setting upload url via env var for convenience by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​85](https://redirect.github.com/CodSpeedHQ/runner/pull/85) - Send unknown cpu\_brand when it is not recognized by [@​adriencaccia](https://redirect.github.com/adriencaccia) - Allow only running the benchmarks, and only uploading the results by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​81](https://redirect.github.com/CodSpeedHQ/runner/pull/81) - Install perf on setup by [@​not-matthias](https://redirect.github.com/not-matthias) - Add perf integration for python by [@​not-matthias](https://redirect.github.com/not-matthias) - Add perf integration for rust by [@​not-matthias](https://redirect.github.com/not-matthias) - Add fifo ipc by [@​not-matthias](https://redirect.github.com/not-matthias) - Use custom time formatting to be in line with the rest of CodSpeed by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​77](https://redirect.github.com/CodSpeedHQ/runner/pull/77) - Output information about benches after a local run by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​76](https://redirect.github.com/CodSpeedHQ/runner/pull/76) - Allow specifying oauth token through CLI by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​75](https://redirect.github.com/CodSpeedHQ/runner/pull/75) - Add option to output structured json by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​74](https://redirect.github.com/CodSpeedHQ/runner/pull/74) - Add flags to 
specify repository from CLI by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) - Improve error handling for valgrind by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​67](https://redirect.github.com/CodSpeedHQ/runner/pull/67) - Handle local run failure by [@​adriencaccia](https://redirect.github.com/adriencaccia) in [#​71](https://redirect.github.com/CodSpeedHQ/runner/pull/71) - Run benchmark with systemd (for optional cpu isolation) by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​86](https://redirect.github.com/CodSpeedHQ/runner/pull/86) ##### 🐛 Bug Fixes - Persist logs when running with skip\_upload by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) in [#​84](https://redirect.github.com/CodSpeedHQ/runner/pull/84) - Valgrind crash for unresolved libpython by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​82](https://redirect.github.com/CodSpeedHQ/runner/pull/82) - Support trailing slash in origin url by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​83](https://redirect.github.com/CodSpeedHQ/runner/pull/83) - Use bash to ensure correct behavior across systems by [@​not-matthias](https://redirect.github.com/not-matthias) - Fix test randomly failing due to other test run in parallel by [@​GuillaumeLagrange](https://redirect.github.com/GuillaumeLagrange) - Check child status code after valgrind by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​72](https://redirect.github.com/CodSpeedHQ/runner/pull/72) - Only show perf output at debug or trace level by [@​not-matthias](https://redirect.github.com/not-matthias) in [#​87](https://redirect.github.com/CodSpeedHQ/runner/pull/87) ##### ⚙️ Internals - Dont use regex in perf map harvest by [@​not-matthias](https://redirect.github.com/not-matthias) - Switch to astral-sh/cargo-dist by [@​adriencaccia](https://redirect.github.com/adriencaccia) in 
[#​80](https://redirect.github.com/CodSpeedHQ/runner/pull/80) **Full Changelog**: https://github.com/CodSpeedHQ/action/compare/v3.5.0...v3.6.1 **Full Runner Changelog**: https://github.com/CodSpeedHQ/runner/blob/main/CHANGELOG.md
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ba7a4b4d1..0ccc9ea4e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2532,7 +2532,7 @@ jobs: run: cargo codspeed build --profile profiling --features codspeed -p uv-bench - name: "Run benchmarks" - uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0 + uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0 with: run: cargo codspeed run token: ${{ secrets.CODSPEED_TOKEN }} @@ -2569,7 +2569,7 @@ jobs: run: cargo codspeed build --profile profiling --features codspeed -p uv-bench - name: "Run benchmarks" - uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0 + uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0 with: run: cargo codspeed run token: ${{ secrets.CODSPEED_TOKEN }} From 4890f3ef2bbde7a2b36641dc1d8f92e7695f68ec Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Mon, 14 Jul 2025 09:07:30 -0500 Subject: [PATCH 024/130] Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` (#14606) Closes https://github.com/astral-sh/uv/issues/14604 --- crates/uv/src/commands/tool/common.rs | 22 
+++++++++++----------- crates/uv/tests/it/tool_run.rs | 20 ++++++++++++++++++++ 2 files changed, 31 insertions(+), 11 deletions(-) diff --git a/crates/uv/src/commands/tool/common.rs b/crates/uv/src/commands/tool/common.rs index ffc1b5645..b24a64e25 100644 --- a/crates/uv/src/commands/tool/common.rs +++ b/crates/uv/src/commands/tool/common.rs @@ -98,14 +98,6 @@ pub(crate) async fn refine_interpreter( return Ok(None); } - // If the user passed a `--python` request, and the refined interpreter is incompatible, we - // can't use it. - if let Some(python_request) = python_request { - if !python_request.satisfied(interpreter, cache) { - return Ok(None); - } - } - // We want an interpreter that's as close to the required version as possible. If we choose the // "latest" Python, we risk choosing a version that lacks wheels for the tool's requirements // (assuming those requirements don't publish source distributions). @@ -135,15 +127,15 @@ pub(crate) async fn refine_interpreter( Bound::Unbounded => unreachable!("`requires-python` should never be unbounded"), }; - let python_request = PythonRequest::Version(VersionRequest::Range( + let requires_python_request = PythonRequest::Version(VersionRequest::Range( VersionSpecifiers::from_iter([lower_bound, upper_bound]), PythonVariant::default(), )); - debug!("Refining interpreter with: {python_request}"); + debug!("Refining interpreter with: {requires_python_request}"); let interpreter = PythonInstallation::find_or_download( - Some(&python_request), + Some(&requires_python_request), EnvironmentPreference::OnlySystem, python_preference, python_downloads, @@ -158,6 +150,14 @@ pub(crate) async fn refine_interpreter( .await? .into_interpreter(); + // If the user passed a `--python` request, and the refined interpreter is incompatible, we + // can't use it. 
+ if let Some(python_request) = python_request { + if !python_request.satisfied(&interpreter, cache) { + return Ok(None); + } + } + Ok(Some(interpreter)) } diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs index 8bcf5c3d1..90d906fb5 100644 --- a/crates/uv/tests/it/tool_run.rs +++ b/crates/uv/tests/it/tool_run.rs @@ -3026,6 +3026,7 @@ fn tool_run_reresolve_python() -> anyhow::Result<()> { + foo==1.0.0 (from file://[TEMP_DIR]/foo) "); + // When an incompatible Python version is explicitly requested, we should not re-resolve uv_snapshot!(context.filters(), context.tool_run() .arg("--from") .arg("./foo") @@ -3034,6 +3035,25 @@ fn tool_run_reresolve_python() -> anyhow::Result<()> { .arg("foo") .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving tool dependencies: + ╰─▶ Because the current Python version (3.11.[X]) does not satisfy Python>=3.12 and foo==1.0.0 depends on Python>=3.12, we can conclude that foo==1.0.0 cannot be used. + And because only foo==1.0.0 is available and you require foo, we can conclude that your requirements are unsatisfiable. 
+ "); + + // Unless the discovered interpreter is compatible with the request + uv_snapshot!(context.filters(), context.tool_run() + .arg("--from") + .arg("./foo") + .arg("--python") + .arg(">=3.11") + .arg("foo") + .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) + .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r" success: true exit_code: 0 ----- stdout ----- From 852aba4f90988b7bd437573b721062228d504b49 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:09:06 +0200 Subject: [PATCH 025/130] Update Rust crate indicatif to 0.18.0 (#14598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [indicatif](https://redirect.github.com/console-rs/indicatif) | workspace.dependencies | minor | `0.17.8` -> `0.18.0` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
    console-rs/indicatif (indicatif) ### [`v0.18.0`](https://redirect.github.com/console-rs/indicatif/releases/tag/0.18.0) [Compare Source](https://redirect.github.com/console-rs/indicatif/compare/0.17.12...0.18.0) Unfortunately [0.17.12](https://redirect.github.com/console-rs/indicatif/releases/0.17.12) had to be yanked because the console upgrade was a semver-incompatible change. Rerelease as 0.18.0 instead. #### What's Changed - Bump version to 0.18.0 by [@​djc](https://redirect.github.com/djc) in [https://github.com/console-rs/indicatif/pull/715](https://redirect.github.com/console-rs/indicatif/pull/715) ### [`v0.17.12`](https://redirect.github.com/console-rs/indicatif/releases/tag/0.17.12) [Compare Source](https://redirect.github.com/console-rs/indicatif/compare/0.17.11...0.17.12) #### What's Changed - Add ProgressBar::force\_draw by [@​jaheba](https://redirect.github.com/jaheba) in [https://github.com/console-rs/indicatif/pull/689](https://redirect.github.com/console-rs/indicatif/pull/689) - Use width to truncate `HumanFloatCount` values by [@​ReagentX](https://redirect.github.com/ReagentX) in [https://github.com/console-rs/indicatif/pull/696](https://redirect.github.com/console-rs/indicatif/pull/696) - `ProgressStyle` enable/disable colors based on draw target by [@​tonywu6](https://redirect.github.com/tonywu6) in [https://github.com/console-rs/indicatif/pull/699](https://redirect.github.com/console-rs/indicatif/pull/699) - Switch dep number\_prefix to unit\_prefix by [@​kimono-koans](https://redirect.github.com/kimono-koans) in [https://github.com/console-rs/indicatif/pull/709](https://redirect.github.com/console-rs/indicatif/pull/709) - draw\_target: inline the format arg to silence clippy by [@​chris-laplante](https://redirect.github.com/chris-laplante) in [https://github.com/console-rs/indicatif/pull/711](https://redirect.github.com/console-rs/indicatif/pull/711) - Upgrade to console 0.16 by [@​djc](https://redirect.github.com/djc) in 
[https://github.com/console-rs/indicatif/pull/712](https://redirect.github.com/console-rs/indicatif/pull/712)
    --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/uv). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 59 ++++++++++++++++++++++++++++++++++++------------------ Cargo.toml | 2 +- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8232c6905..7f6b601ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -788,10 +788,22 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.1", "windows-sys 0.59.0", ] +[[package]] +name = "console" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width 0.2.1", + "windows-sys 0.60.2", +] + [[package]] name = "core-foundation" version = "0.10.0" @@ -1910,14 +1922,14 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.17.11" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" +checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd" dependencies = [ - "console", - "number_prefix", + "console 0.16.0", "portable-atomic", "unicode-width 0.2.1", + "unit-prefix", "web-time", ] @@ -1933,7 +1945,7 @@ version = "1.43.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371" dependencies = [ - "console", + "console 0.15.11", "once_cell", "pest", "pest_derive", @@ -2465,12 +2477,6 @@ dependencies = [ "libc", ] -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - [[package]] name = "object" version = "0.36.7" @@ -4523,6 +4529,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +[[package]] +name = "unit-prefix" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -4631,7 +4643,7 @@ dependencies = [ "base64 0.22.1", "byteorder", "clap", - "console", + "console 0.15.11", "ctrlc", "dotenvy", "dunce", @@ -5038,7 +5050,7 @@ dependencies = [ name = "uv-console" version = "0.0.1" dependencies = [ - "console", + "console 0.15.11", ] [[package]] @@ -5675,7 +5687,7 @@ version = "0.1.0" dependencies = [ "anyhow", "configparser", - "console", + "console 0.15.11", "fs-err 3.1.1", "futures", "rustc-hash", @@ -6332,7 +6344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1" dependencies = [ "windows-core 0.59.0", - "windows-targets 0.53.0", + "windows-targets 0.53.2", ] [[package]] @@ -6379,7 +6391,7 @@ dependencies = [ "windows-interface 0.59.1", "windows-result 0.3.4", "windows-strings 0.3.1", - "windows-targets 0.53.0", + "windows-targets 0.53.2", ] [[package]] @@ -6550,6 +6562,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.60.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -6583,9 +6604,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.0" +version = "0.53.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" dependencies = [ "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", diff --git a/Cargo.toml b/Cargo.toml index ecdc11701..3405cff53 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,7 +116,7 @@ home = { version = "0.5.9" } html-escape = { version = "0.2.13" } http = { version = "1.1.0" } indexmap = { version = "2.5.0" } -indicatif = { version = "0.17.8" } +indicatif = { version = "0.18.0" } indoc = { version = "2.0.5" } itertools = { version = "0.14.0" } jiff = { version = "0.2.0", features = ["serde"] } From df44199ceb4c856142bb67e060f464ba5c06d72e Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Mon, 14 Jul 2025 10:42:35 -0400 Subject: [PATCH 026/130] Add an exception handler on Windows (#14582) We've seen a few cases of uv.exe exiting with an exception code as its exit status and no user-visible output (#14563 in the field, and #13812 in CI). It seems that recent versions of Windows no longer show dialog boxes on access violations (what UNIX calls segfaults) or similar errors. Something is probably sent to Windows Error Reporting, and we can maybe sign up to get the crashes from Microsoft, but the user experience of seeing uv exit with no output is poor, both for end users and during development. While it's possible to opt out of this behavior or set up a debugger, this isn't the default configuration. (See https://superuser.com/q/1246626 for some pointers.) 
In order to get some output on a crash, we need to install our own default handler for unhandled exceptions (or call all our code inside a Structured Exception Handling __try/__except block, which is complicated in Rust). This is the moral equivalent of a segfault handler on Windows; the kernel creates a new stack frame and passes arguments to it with some processor state. This commit adds a relatively simple exception handler that leans on Rust's own backtrace implementation and also displays some minimal information from the exception itself. This should be enough info to communicate that something went wrong and let us collect enough information to attempt to debug. There are also a handful of (non-Rust) open-source libraries for this like Breakpad and Crashpad (both from Google) and crashrpt. The approach here, of using SetUnhandledExceptionFilter, seems to be the standard one taken by other such libraries. Crashpad also seems to try to use a newer mechanism for an out-of-tree DLL to report the crash: https://issues.chromium.org/issues/42310037 If we have serious problems with memory corruption, it might be worth adopting some third-party library that has already implemented this approach. (In general, the docs of other crash reporting libraries are worth skimming to understand how these things ought to work.)
Co-authored-by: samypr100 <3933065+samypr100@users.noreply.github.com> --- Cargo.lock | 1 + Cargo.toml | 2 +- crates/uv/Cargo.toml | 1 + crates/uv/src/lib.rs | 5 ++ crates/uv/src/windows_exception.rs | 130 +++++++++++++++++++++++++++++ 5 files changed, 138 insertions(+), 1 deletion(-) create mode 100644 crates/uv/src/windows_exception.rs diff --git a/Cargo.lock b/Cargo.lock index 7f6b601ca..f2bebefc9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4734,6 +4734,7 @@ dependencies = [ "walkdir", "which", "whoami", + "windows 0.59.0", "wiremock", "zip", ] diff --git a/Cargo.toml b/Cargo.toml index 3405cff53..752955223 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -184,7 +184,7 @@ url = { version = "2.5.2", features = ["serde"] } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" } walkdir = { version = "2.5.0" } which = { version = "8.0.0", features = ["regex"] } -windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] } +windows = { version = "0.59.0", features = ["Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] } windows-core = { version = "0.59.0" } windows-registry = { version = "0.5.0" } windows-result = { version = "0.3.0" } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index 7fa28ed67..904cc8fc3 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -108,6 +108,7 @@ zip = { workspace = true } [target.'cfg(target_os = "windows")'.dependencies] self-replace = { workspace = true } +windows = { workspace = true } [dev-dependencies] assert_cmd = { version = "2.0.16" } diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 84d889599..2a163d32c 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -57,6 +57,8 @@ pub(crate) mod commands; pub(crate) mod logging; pub(crate) mod printer; pub(crate) mod settings; +#[cfg(windows)] +mod windows_exception; #[instrument(skip_all)] async fn run(mut cli: Cli) -> Result { @@ -2189,6 
+2191,9 @@ where I: IntoIterator, T: Into + Clone, { + #[cfg(windows)] + windows_exception::setup(); + // Set the `UV` variable to the current executable so it is implicitly propagated to all child // processes, e.g., in `uv run`. if let Ok(current_exe) = std::env::current_exe() { diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs new file mode 100644 index 000000000..e96075f96 --- /dev/null +++ b/crates/uv/src/windows_exception.rs @@ -0,0 +1,130 @@ +//! Helper for setting up Windows exception handling. +//! +//! Recent versions of Windows seem to no longer show dialog boxes on access violations +//! (segfaults) or similar errors. The user experience is that the command exits with +//! the exception code as its exit status and no visible output. In order to see these +//! errors both in the field and in CI, we need to install our own exception handler. +//! +//! This is a relatively simple exception handler that leans on Rust's own backtrace +//! implementation and also displays some minimal information from the exception itself. 
+
+#![allow(unsafe_code)]
+#![allow(clippy::print_stderr)]
+
+use windows::Win32::{
+    Foundation,
+    System::Diagnostics::Debug::{
+        CONTEXT, EXCEPTION_CONTINUE_SEARCH, EXCEPTION_POINTERS, SetUnhandledExceptionFilter,
+    },
+};
+
+fn display_exception_info(name: &str, info: &[usize; 15]) {
+    match info[0] {
+        0 => eprintln!("{name} reading {:#x}", info[1]),
+        1 => eprintln!("{name} writing {:#x}", info[1]),
+        8 => eprintln!("{name} executing {:#x}", info[1]),
+        _ => eprintln!("{name} from operation {} at {:#x}", info[0], info[1]),
+    }
+}
+
+#[cfg(target_arch = "x86")]
+fn dump_regs(c: &CONTEXT) {
+    eprintln!(
+        "eax={:08x} ebx={:08x} ecx={:08x} edx={:08x} esi={:08x} edi={:08x}",
+        c.Eax, c.Ebx, c.Ecx, c.Edx, c.Esi, c.Edi
+    );
+    eprintln!(
+        "eip={:08x} ebp={:08x} esp={:08x} eflags={:08x}",
+        c.Eip, c.Ebp, c.Esp, c.EFlags
+    );
+}
+
+#[cfg(target_arch = "x86_64")]
+fn dump_regs(c: &CONTEXT) {
+    eprintln!("rax={:016x} rbx={:016x} rcx={:016x}", c.Rax, c.Rbx, c.Rcx);
+    eprintln!("rdx={:016x} rsi={:016x} rdi={:016x}", c.Rdx, c.Rsi, c.Rdi);
+    eprintln!("rsp={:016x} rbp={:016x} r8={:016x}", c.Rsp, c.Rbp, c.R8);
+    eprintln!(" r9={:016x} r10={:016x} r11={:016x}", c.R9, c.R10, c.R11);
+    eprintln!("r12={:016x} r13={:016x} r14={:016x}", c.R12, c.R13, c.R14);
+    eprintln!(
+        "r15={:016x} rip={:016x} eflags={:016x}",
+        c.R15, c.Rip, c.EFlags
+    );
+}
+
+#[cfg(target_arch = "aarch64")]
+fn dump_regs(c: &CONTEXT) {
+    // SAFETY: The two variants of this anonymous union are equivalent,
+    // one's an array and one has named registers.
+ let r = unsafe { c.Anonymous.Anonymous }; + eprintln!("cpsr={:016x} sp={:016x} pc={:016x}", c.Cpsr, c.Sp, c.Pc); + eprintln!(" x0={:016x} x1={:016x} x2={:016x}", r.X0, r.X1, r.X2); + eprintln!(" x3={:016x} x4={:016x} x5={:016x}", r.X3, r.X4, r.X5); + eprintln!(" x6={:016x} x7={:016x} x8={:016x}", r.X6, r.X7, r.X8); + eprintln!(" x9={:016x} x10={:016x} x11={:016x}", r.X9, r.X10, r.X11); + eprintln!(" x12={:016x} x13={:016x} x14={:016x}", r.X12, r.X13, r.X14); + eprintln!(" x15={:016x} x16={:016x} x17={:016x}", r.X15, r.X16, r.X17); + eprintln!(" x18={:016x} x19={:016x} x20={:016x}", r.X18, r.X19, r.X20); + eprintln!(" x21={:016x} x22={:016x} x23={:016x}", r.X21, r.X22, r.X23); + eprintln!(" x24={:016x} x25={:016x} x26={:016x}", r.X24, r.X25, r.X26); + eprintln!(" x27={:016x} x28={:016x}", r.X27, r.X28); + eprintln!(" fp={:016x} lr={:016x}", r.Fp, r.Lr); +} + +unsafe extern "system" fn unhandled_exception_filter( + exception_info: *const EXCEPTION_POINTERS, +) -> i32 { + // TODO: Really we should not be using eprintln here because Stderr is not async-signal-safe. + // Probably we should be calling the console APIs directly. 
+ eprintln!("error: unhandled exception in uv, please report a bug:"); + let mut context = None; + // SAFETY: Pointer comes from the OS + if let Some(info) = unsafe { exception_info.as_ref() } { + // SAFETY: Pointer comes from the OS + if let Some(exc) = unsafe { info.ExceptionRecord.as_ref() } { + eprintln!( + "code {:#X} at address {:?}", + exc.ExceptionCode.0, exc.ExceptionAddress + ); + match exc.ExceptionCode { + Foundation::EXCEPTION_ACCESS_VIOLATION => { + display_exception_info("EXCEPTION_ACCESS_VIOLATION", &exc.ExceptionInformation); + } + Foundation::EXCEPTION_IN_PAGE_ERROR => { + display_exception_info("EXCEPTION_IN_PAGE_ERROR", &exc.ExceptionInformation); + } + Foundation::EXCEPTION_ILLEGAL_INSTRUCTION => { + eprintln!("EXCEPTION_ILLEGAL_INSTRUCTION"); + } + Foundation::EXCEPTION_STACK_OVERFLOW => { + eprintln!("EXCEPTION_STACK_OVERFLOW"); + } + _ => {} + } + } else { + eprintln!("(ExceptionRecord is NULL)"); + } + // SAFETY: Pointer comes from the OS + context = unsafe { info.ContextRecord.as_ref() }; + } else { + eprintln!("(ExceptionInfo is NULL)"); + } + let backtrace = std::backtrace::Backtrace::capture(); + if backtrace.status() == std::backtrace::BacktraceStatus::Disabled { + eprintln!("note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace"); + } else { + if let Some(context) = context { + dump_regs(context); + } + eprintln!("stack backtrace:\n{backtrace:#}"); + } + EXCEPTION_CONTINUE_SEARCH +} + +/// Set up our handler for unhandled exceptions. 
+pub(crate) fn setup() { + // SAFETY: winapi call + unsafe { + SetUnhandledExceptionFilter(Some(Some(unhandled_exception_filter))); + } +} From 34fbc06ad6111f43a259993c51f12ac021cc2238 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 14 Jul 2025 10:53:39 -0400 Subject: [PATCH 027/130] Add experimental `uv sync --output-format json` (#13689) This is a continuation of the work in * #12405 I have: * moved to an architecture where the human output is derived from the json structs to centralize more of the printing state/logic * cleaned up some of the names/types * added tests * removed the restriction that this output is --dry-run only I have not yet added package info, which was TBD in their design. --------- Co-authored-by: x0rw Co-authored-by: Zanie Blue Co-authored-by: John Mumm --- crates/uv-cli/src/lib.rs | 13 + crates/uv-fs/src/path.rs | 6 + crates/uv/src/commands/project/mod.rs | 16 + crates/uv/src/commands/project/sync.rs | 615 ++++++++--- crates/uv/src/lib.rs | 1 + crates/uv/src/settings.rs | 7 +- crates/uv/tests/it/common/mod.rs | 2 +- crates/uv/tests/it/sync.rs | 1326 ++++++++++++++++-------- docs/reference/cli.md | 7 +- 9 files changed, 1389 insertions(+), 604 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 056447959..0f3652341 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -46,6 +46,15 @@ pub enum PythonListFormat { Json, } +#[derive(Debug, Default, Clone, Copy, clap::ValueEnum)] +pub enum SyncFormat { + /// Display the result in a human-readable format. + #[default] + Text, + /// Display the result in JSON format. + Json, +} + #[derive(Debug, Default, Clone, clap::ValueEnum)] pub enum ListFormat { /// Display the list of packages in a human-readable table. @@ -3207,6 +3216,10 @@ pub struct SyncArgs { #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)] pub extra: Option>, + /// Select the output format. 
+ #[arg(long, value_enum, default_value_t = SyncFormat::default())] + pub output_format: SyncFormat, + /// Include all optional dependencies. /// /// When two or more extras are declared as conflicting in `tool.uv.conflicts`, using this flag diff --git a/crates/uv-fs/src/path.rs b/crates/uv-fs/src/path.rs index 40e579f8e..45d1da1c8 100644 --- a/crates/uv-fs/src/path.rs +++ b/crates/uv-fs/src/path.rs @@ -398,6 +398,12 @@ impl From> for PortablePathBuf { } } +impl<'a> From<&'a Path> for PortablePathBuf { + fn from(path: &'a Path) -> Self { + Box::::from(path).into() + } +} + #[cfg(feature = "serde")] impl serde::Serialize for PortablePathBuf { fn serialize(&self, serializer: S) -> Result diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 1a0274cac..774009f63 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -1408,6 +1408,14 @@ impl ProjectEnvironment { Self::WouldCreate(..) => Err(ProjectError::DroppedEnvironment), } } + + /// Return the path to the actual target, if this was a dry run environment. + pub(crate) fn dry_run_target(&self) -> Option<&Path> { + match self { + Self::WouldReplace(path, _, _) | Self::WouldCreate(path, _, _) => Some(path), + Self::Created(_) | Self::Existing(_) | Self::Replaced(_) => None, + } + } } impl std::ops::Deref for ProjectEnvironment { @@ -1588,6 +1596,14 @@ impl ScriptEnvironment { Self::WouldCreate(..) => Err(ProjectError::DroppedEnvironment), } } + + /// Return the path to the actual target, if this was a dry run environment. 
+ pub(crate) fn dry_run_target(&self) -> Option<&Path> { + match self { + Self::WouldReplace(path, _, _) | Self::WouldCreate(path, _, _) => Some(path), + Self::Created(_) | Self::Existing(_) | Self::Replaced(_) => None, + } + } } impl std::ops::Deref for ScriptEnvironment { diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index a9a161527..94586004f 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -6,9 +6,10 @@ use std::sync::Arc; use anyhow::{Context, Result}; use itertools::Itertools; use owo_colors::OwoColorize; +use serde::Serialize; use tracing::warn; - use uv_cache::Cache; +use uv_cli::SyncFormat; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode, @@ -19,7 +20,7 @@ use uv_dispatch::BuildDispatch; use uv_distribution_types::{ DirectorySourceDist, Dist, Index, Requirement, Resolution, ResolvedDist, SourceDist, }; -use uv_fs::Simplified; +use uv_fs::{PortablePathBuf, Simplified}; use uv_installer::SitePackages; use uv_normalize::{DefaultExtras, DefaultGroups, PackageName}; use uv_pep508::{MarkerTree, VersionOrUrl}; @@ -77,7 +78,14 @@ pub(crate) async fn sync( cache: &Cache, printer: Printer, preview: PreviewMode, + output_format: SyncFormat, ) -> Result { + if preview.is_enabled() && matches!(output_format, SyncFormat::Json) { + warn_user!( + "The `--output-format json` option is experimental and the schema may change without warning. Pass `--preview` to disable this warning." + ); + } + // Identify the target. let workspace_cache = WorkspaceCache::default(); let target = if let Some(script) = script { @@ -180,103 +188,16 @@ pub(crate) async fn sync( }) .ok(); - // Notify the user of any environment changes. 
- match &environment { - SyncEnvironment::Project(ProjectEnvironment::Existing(environment)) - if dry_run.enabled() => - { - writeln!( - printer.stderr(), - "{}", - format!( - "Discovered existing environment at: {}", - environment.root().user_display().bold() - ) - .dimmed() - )?; - } - SyncEnvironment::Project(ProjectEnvironment::WouldReplace(root, ..)) - if dry_run.enabled() => - { - writeln!( - printer.stderr(), - "{}", - format!( - "Would replace existing virtual environment at: {}", - root.user_display().bold() - ) - .dimmed() - )?; - } - SyncEnvironment::Project(ProjectEnvironment::WouldCreate(root, ..)) - if dry_run.enabled() => - { - writeln!( - printer.stderr(), - "{}", - format!( - "Would create virtual environment at: {}", - root.user_display().bold() - ) - .dimmed() - )?; - } - SyncEnvironment::Script(ScriptEnvironment::Existing(environment)) => { - if dry_run.enabled() { - writeln!( - printer.stderr(), - "{}", - format!( - "Discovered existing environment at: {}", - environment.root().user_display().bold() - ) - .dimmed() - )?; - } else { - writeln!( - printer.stderr(), - "Using script environment at: {}", - environment.root().user_display().cyan() - )?; - } - } - SyncEnvironment::Script(ScriptEnvironment::Replaced(environment)) if !dry_run.enabled() => { - writeln!( - printer.stderr(), - "Recreating script environment at: {}", - environment.root().user_display().cyan() - )?; - } - SyncEnvironment::Script(ScriptEnvironment::Created(environment)) if !dry_run.enabled() => { - writeln!( - printer.stderr(), - "Creating script environment at: {}", - environment.root().user_display().cyan() - )?; - } - SyncEnvironment::Script(ScriptEnvironment::WouldReplace(root, ..)) if dry_run.enabled() => { - writeln!( - printer.stderr(), - "{}", - format!( - "Would replace existing script environment at: {}", - root.user_display().bold() - ) - .dimmed() - )?; - } - SyncEnvironment::Script(ScriptEnvironment::WouldCreate(root, ..)) if dry_run.enabled() => { - writeln!( - 
printer.stderr(), - "{}", - format!( - "Would create script environment at: {}", - root.user_display().bold() - ) - .dimmed() - )?; - } - _ => {} + let sync_report = SyncReport { + dry_run: dry_run.enabled(), + environment: EnvironmentReport::from(&environment), + action: SyncAction::from(&environment), + target: TargetName::from(&target), + }; + + // Show the intermediate results if relevant + if let Some(message) = sync_report.format(output_format) { + writeln!(printer.stderr(), "{message}")?; } // Special-case: we're syncing a script that doesn't have an associated lockfile. In that case, @@ -340,7 +261,23 @@ pub(crate) async fn sync( ) .await { - Ok(..) => return Ok(ExitStatus::Success), + Ok(..) => { + // Generate a report for the script without a lockfile + let report = Report { + schema: SchemaReport::default(), + target: TargetName::from(&target), + project: None, + script: Some(ScriptReport::from(script)), + sync: sync_report, + lock: None, + dry_run: dry_run.enabled(), + }; + if let Some(output) = report.format(output_format) { + writeln!(printer.stdout(), "{output}")?; + } + return Ok(ExitStatus::Success); + } + // TODO(zanieb): We should respect `--output-format json` for the error case Err(ProjectError::Operation(err)) => { return diagnostics::OperationDiagnostic::native_tls( network_settings.native_tls, @@ -387,46 +324,7 @@ pub(crate) async fn sync( .execute(lock_target) .await { - Ok(result) => { - if dry_run.enabled() { - match result { - LockResult::Unchanged(..) => { - writeln!( - printer.stderr(), - "{}", - format!( - "Found up-to-date lockfile at: {}", - lock_target.lock_path().user_display().bold() - ) - .dimmed() - )?; - } - LockResult::Changed(None, ..) => { - writeln!( - printer.stderr(), - "{}", - format!( - "Would create lockfile at: {}", - lock_target.lock_path().user_display().bold() - ) - .dimmed() - )?; - } - LockResult::Changed(Some(..), ..) 
=> { - writeln!( - printer.stderr(), - "{}", - format!( - "Would update lockfile at: {}", - lock_target.lock_path().user_display().bold() - ) - .dimmed() - )?; - } - } - } - Outcome::Success(result.into_lock()) - } + Ok(result) => Outcome::Success(result), Err(ProjectError::Operation(err)) => { return diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls) .report(err) @@ -440,6 +338,25 @@ pub(crate) async fn sync( Err(err) => return Err(err.into()), }; + let lock_report = LockReport::from((&lock_target, &mode, &outcome)); + if let Some(message) = lock_report.format(output_format) { + writeln!(printer.stderr(), "{message}")?; + } + + let report = Report { + schema: SchemaReport::default(), + target: TargetName::from(&target), + project: target.project().map(ProjectReport::from), + script: target.script().map(ScriptReport::from), + sync: sync_report, + lock: Some(lock_report), + dry_run: dry_run.enabled(), + }; + + if let Some(output) = report.format(output_format) { + writeln!(printer.stdout(), "{output}")?; + } + // Identify the installation target. let sync_target = identify_installation_target(&target, outcome.lock(), all_packages, package.as_ref()); @@ -490,7 +407,7 @@ pub(crate) async fn sync( #[allow(clippy::large_enum_variant)] enum Outcome { /// The `lock` operation was successful. - Success(Lock), + Success(LockResult), /// The `lock` operation successfully resolved, but failed due to a mismatch (e.g., with `--locked`). LockMismatch(Box), } @@ -499,7 +416,7 @@ impl Outcome { /// Return the [`Lock`] associated with this outcome. 
fn lock(&self) -> &Lock { match self { - Self::Success(lock) => lock, + Self::Success(lock) => lock.lock(), Self::LockMismatch(lock) => lock, } } @@ -563,6 +480,22 @@ enum SyncTarget { Script(Pep723Script), } +impl SyncTarget { + fn project(&self) -> Option<&VirtualProject> { + match self { + Self::Project(project) => Some(project), + Self::Script(_) => None, + } + } + + fn script(&self) -> Option<&Pep723Script> { + match self { + Self::Project(_) => None, + Self::Script(script) => Some(script), + } + } +} + #[derive(Debug)] enum SyncEnvironment { /// A Python environment for a project. @@ -571,6 +504,15 @@ enum SyncEnvironment { Script(ScriptEnvironment), } +impl SyncEnvironment { + fn dry_run_target(&self) -> Option<&Path> { + match self { + Self::Project(env) => env.dry_run_target(), + Self::Script(env) => env.dry_run_target(), + } + } +} + impl Deref for SyncEnvironment { type Target = PythonEnvironment; @@ -892,3 +834,392 @@ fn store_credentials_from_target(target: InstallTarget<'_>) { } } } + +#[derive(Debug, Serialize)] +#[serde(rename_all = "snake_case")] +struct WorkspaceReport { + /// The workspace directory path. + path: PortablePathBuf, +} + +impl From<&Workspace> for WorkspaceReport { + fn from(workspace: &Workspace) -> Self { + Self { + path: workspace.install_path().as_path().into(), + } + } +} +#[derive(Debug, Serialize)] +#[serde(rename_all = "snake_case")] +struct ProjectReport { + // + path: PortablePathBuf, + workspace: WorkspaceReport, +} + +impl From<&VirtualProject> for ProjectReport { + fn from(project: &VirtualProject) -> Self { + Self { + path: project.root().into(), + workspace: WorkspaceReport::from(project.workspace()), + } + } +} + +impl From<&SyncTarget> for TargetName { + fn from(target: &SyncTarget) -> Self { + match target { + SyncTarget::Project(_) => TargetName::Project, + SyncTarget::Script(_) => TargetName::Script, + } + } +} + +#[derive(Serialize, Debug)] +struct ScriptReport { + /// The path to the script. 
+ path: PortablePathBuf, +} + +impl From<&Pep723Script> for ScriptReport { + fn from(script: &Pep723Script) -> Self { + Self { + path: script.path.as_path().into(), + } + } +} + +#[derive(Serialize, Debug, Default)] +#[serde(rename_all = "snake_case")] +enum SchemaVersion { + /// An unstable, experimental schema. + #[default] + Preview, +} + +#[derive(Serialize, Debug, Default)] +struct SchemaReport { + /// The version of the schema. + version: SchemaVersion, +} + +/// A report of the uv sync operation +#[derive(Debug, Serialize)] +#[serde(rename_all = "snake_case")] +struct Report { + /// The schema of this report. + schema: SchemaReport, + /// The target of the sync operation, either a project or a script. + target: TargetName, + /// The report for a [`TargetName::Project`], if applicable. + #[serde(skip_serializing_if = "Option::is_none")] + project: Option, + /// The report for a [`TargetName::Script`], if applicable. + #[serde(skip_serializing_if = "Option::is_none")] + script: Option, + /// The report for the sync operation. + sync: SyncReport, + /// The report for the lock operation. + lock: Option, + /// Whether this is a dry run. + dry_run: bool, +} + +/// The kind of target +#[derive(Debug, Serialize, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum TargetName { + Project, + Script, +} + +impl std::fmt::Display for TargetName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TargetName::Project => write!(f, "project"), + TargetName::Script => write!(f, "script"), + } + } +} + +/// Represents the action taken during a sync. +#[derive(Serialize, Debug)] +#[serde(rename_all = "snake_case")] +enum SyncAction { + /// The environment was checked and required no updates. + Check, + /// The environment was updated. + Update, + /// The environment was replaced. + Replace, + /// A new environment was created. 
+ Create, +} + +impl From<&SyncEnvironment> for SyncAction { + fn from(env: &SyncEnvironment) -> Self { + match &env { + SyncEnvironment::Project(ProjectEnvironment::Existing(..)) => SyncAction::Check, + SyncEnvironment::Project(ProjectEnvironment::Created(..)) => SyncAction::Create, + SyncEnvironment::Project(ProjectEnvironment::WouldCreate(..)) => SyncAction::Create, + SyncEnvironment::Project(ProjectEnvironment::WouldReplace(..)) => SyncAction::Replace, + SyncEnvironment::Project(ProjectEnvironment::Replaced(..)) => SyncAction::Update, + SyncEnvironment::Script(ScriptEnvironment::Existing(..)) => SyncAction::Check, + SyncEnvironment::Script(ScriptEnvironment::Created(..)) => SyncAction::Create, + SyncEnvironment::Script(ScriptEnvironment::WouldCreate(..)) => SyncAction::Create, + SyncEnvironment::Script(ScriptEnvironment::WouldReplace(..)) => SyncAction::Replace, + SyncEnvironment::Script(ScriptEnvironment::Replaced(..)) => SyncAction::Update, + } + } +} + +impl SyncAction { + fn message(&self, target: TargetName, dry_run: bool) -> Option<&'static str> { + let message = if dry_run { + match self { + SyncAction::Check => "Would use", + SyncAction::Update => "Would update", + SyncAction::Replace => "Would replace", + SyncAction::Create => "Would create", + } + } else { + // For projects, we omit some of these messages when we're not in dry-run mode + let is_project = matches!(target, TargetName::Project); + match self { + SyncAction::Check | SyncAction::Update | SyncAction::Create if is_project => { + return None; + } + SyncAction::Check => "Using", + SyncAction::Update => "Updating", + SyncAction::Replace => "Replacing", + SyncAction::Create => "Creating", + } + }; + Some(message) + } +} + +/// Represents the action taken during a lock. +#[derive(Serialize, Debug)] +#[serde(rename_all = "snake_case")] +enum LockAction { + /// The lockfile was used without checking. + Use, + /// The lockfile was checked and required no updates. 
+ Check, + /// The lockfile was updated. + Update, + /// A new lockfile was created. + Create, +} + +impl LockAction { + fn message(&self, dry_run: bool) -> Option<&'static str> { + let message = if dry_run { + match self { + LockAction::Use => return None, + LockAction::Check => "Found up-to-date", + LockAction::Update => "Would update", + LockAction::Create => "Would create", + } + } else { + return None; + }; + Some(message) + } +} + +#[derive(Serialize, Debug)] +struct PythonReport { + path: PortablePathBuf, + version: uv_pep508::StringVersion, + implementation: String, +} + +impl From<&uv_python::Interpreter> for PythonReport { + fn from(interpreter: &uv_python::Interpreter) -> Self { + Self { + path: interpreter.sys_executable().into(), + version: interpreter.python_full_version().clone(), + implementation: interpreter.implementation_name().to_string(), + } + } +} + +impl PythonReport { + /// Set the path for this Python report. + #[must_use] + fn with_path(mut self, path: PortablePathBuf) -> Self { + self.path = path; + self + } +} + +#[derive(Serialize, Debug)] +struct EnvironmentReport { + /// The path to the environment. + path: PortablePathBuf, + /// The Python interpreter for the environment. + python: PythonReport, +} + +impl From<&PythonEnvironment> for EnvironmentReport { + fn from(env: &PythonEnvironment) -> Self { + Self { + python: PythonReport::from(env.interpreter()), + path: env.root().into(), + } + } +} + +impl From<&SyncEnvironment> for EnvironmentReport { + fn from(env: &SyncEnvironment) -> Self { + let report = EnvironmentReport::from(&**env); + // Replace the path if necessary; we construct a temporary virtual environment during dry + // run invocations and want to report the path we _would_ use. + if let Some(path) = env.dry_run_target() { + report.with_path(path.into()) + } else { + report + } + } +} + +impl EnvironmentReport { + /// Set the path for this environment report. 
+ #[must_use] + fn with_path(mut self, path: PortablePathBuf) -> Self { + let python_path = &self.python.path; + if let Ok(python_path) = python_path.as_ref().strip_prefix(self.path) { + let new_path = path.as_ref().to_path_buf().join(python_path); + self.python = self.python.with_path(new_path.as_path().into()); + } + self.path = path; + self + } +} + +/// The report for a sync operation. +#[derive(Serialize, Debug)] +struct SyncReport { + /// The environment. + environment: EnvironmentReport, + /// The action performed during the sync, e.g., what was done to the environment. + action: SyncAction, + + // We store these fields so the report can format itself self-contained, but the outer + // [`Report`] is intended to include these in user-facing output + #[serde(skip)] + dry_run: bool, + #[serde(skip)] + target: TargetName, +} + +impl SyncReport { + fn format(&self, output_format: SyncFormat) -> Option { + match output_format { + // This is an intermediate report, when using JSON, it's only rendered at the end + SyncFormat::Json => None, + SyncFormat::Text => self.to_human_readable_string(), + } + } + + fn to_human_readable_string(&self) -> Option { + let Self { + environment, + action, + dry_run, + target, + } = self; + + let action = action.message(*target, *dry_run)?; + + let message = format!( + "{action} {target} environment at: {path}", + path = environment.path.user_display().cyan(), + ); + if *dry_run { + return Some(message.dimmed().to_string()); + } + + Some(message) + } +} + +/// The report for a lock operation. +#[derive(Debug, Serialize)] +struct LockReport { + /// The path to the lockfile + path: PortablePathBuf, + /// Whether the lockfile was preserved, created, or updated. 
+ action: LockAction, + + // We store this field so the report can format itself self-contained, but the outer + // [`Report`] is intended to include this in user-facing output + #[serde(skip)] + dry_run: bool, +} + +impl From<(&LockTarget<'_>, &LockMode<'_>, &Outcome)> for LockReport { + fn from((target, mode, outcome): (&LockTarget, &LockMode, &Outcome)) -> Self { + Self { + path: target.lock_path().deref().into(), + action: match outcome { + Outcome::Success(result) => { + match result { + LockResult::Unchanged(..) => match mode { + // When `--frozen` is used, we don't check the lockfile + LockMode::Frozen => LockAction::Use, + LockMode::DryRun(_) | LockMode::Locked(_) | LockMode::Write(_) => { + LockAction::Check + } + }, + LockResult::Changed(None, ..) => LockAction::Create, + LockResult::Changed(Some(_), ..) => LockAction::Update, + } + } + // TODO(zanieb): We don't have a way to report the outcome of the lock yet + Outcome::LockMismatch(_) => LockAction::Check, + }, + dry_run: matches!(mode, LockMode::DryRun(_)), + } + } +} + +impl LockReport { + fn format(&self, output_format: SyncFormat) -> Option { + match output_format { + SyncFormat::Json => None, + SyncFormat::Text => self.to_human_readable_string(), + } + } + + fn to_human_readable_string(&self) -> Option { + let Self { + path, + action, + dry_run, + } = self; + + let action = action.message(*dry_run)?; + + let message = format!( + "{action} lockfile at: {path}", + path = path.user_display().cyan(), + ); + if *dry_run { + return Some(message.dimmed().to_string()); + } + + Some(message) + } +} + +impl Report { + fn format(&self, output_format: SyncFormat) -> Option { + match output_format { + SyncFormat::Json => serde_json::to_string_pretty(self).ok(), + SyncFormat::Text => None, + } + } +} diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 2a163d32c..0b4d0bb82 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1818,6 +1818,7 @@ async fn run_project( &cache, printer, 
globals.preview, + args.output_format, )) .await } diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index f89704d45..8a325d538 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -11,8 +11,8 @@ use uv_cli::{ PipCheckArgs, PipCompileArgs, PipFreezeArgs, PipInstallArgs, PipListArgs, PipShowArgs, PipSyncArgs, PipTreeArgs, PipUninstallArgs, PythonFindArgs, PythonInstallArgs, PythonListArgs, PythonListFormat, PythonPinArgs, PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs, - SyncArgs, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, - VenvArgs, VersionArgs, VersionBump, VersionFormat, + SyncArgs, SyncFormat, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, + ToolUninstallArgs, TreeArgs, VenvArgs, VersionArgs, VersionBump, VersionFormat, }; use uv_cli::{ AuthorFrom, BuildArgs, ExportArgs, PublishArgs, PythonDirArgs, ResolverInstallerArgs, @@ -1154,6 +1154,7 @@ pub(crate) struct SyncSettings { pub(crate) install_mirrors: PythonInstallMirrors, pub(crate) refresh: Refresh, pub(crate) settings: ResolverInstallerSettings, + pub(crate) output_format: SyncFormat, } impl SyncSettings { @@ -1194,6 +1195,7 @@ impl SyncSettings { python_platform, check, no_check, + output_format, } = args; let install_mirrors = filesystem .clone() @@ -1213,6 +1215,7 @@ impl SyncSettings { }; Self { + output_format, locked, frozen, dry_run, diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index 90f436f6f..2dc72fa1d 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -210,7 +210,7 @@ impl TestContext { pub fn with_filtered_python_names(mut self) -> Self { if cfg!(windows) { self.filters - .push(("python.exe".to_string(), "python".to_string())); + .push((r"python\.exe".to_string(), "python".to_string())); } else { self.filters .push((r"python\d.\d\d".to_string(), "python".to_string())); diff --git a/crates/uv/tests/it/sync.rs 
b/crates/uv/tests/it/sync.rs index d4479296a..7063035f9 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -27,7 +27,7 @@ fn sync() -> Result<()> { )?; // Running `uv sync` should generate a lockfile. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -37,7 +37,7 @@ fn sync() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -60,14 +60,14 @@ fn locked() -> Result<()> { )?; // Running with `--locked` should error, if no lockfile is present. - uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`. - "###); + "); // Lock the initial requirements. context.lock().assert().success(); @@ -86,7 +86,7 @@ fn locked() -> Result<()> { )?; // Running with `--locked` should error. - uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r" success: false exit_code: 2 ----- stdout ----- @@ -94,7 +94,7 @@ fn locked() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + "); let updated = context.read("uv.lock"); @@ -120,14 +120,14 @@ fn frozen() -> Result<()> { )?; // Running with `--frozen` should error, if no lockfile is present. 
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`. - "###); + "); context.lock().assert().success(); @@ -143,7 +143,7 @@ fn frozen() -> Result<()> { )?; // Running with `--frozen` should install the stale lockfile. - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -154,7 +154,7 @@ fn frozen() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -172,7 +172,7 @@ fn empty() -> Result<()> { )?; // Running `uv sync` should generate an empty lockfile. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -181,12 +181,12 @@ fn empty() -> Result<()> { warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`. Resolved in [TIME] Audited in [TIME] - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); // Running `uv sync` again should succeed. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -195,7 +195,7 @@ fn empty() -> Result<()> { warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`. 
Resolved in [TIME] Audited in [TIME] - "###); + "); Ok(()) } @@ -252,7 +252,7 @@ fn package() -> Result<()> { let init = src.child("__init__.py"); init.touch()?; - uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child"), @r" success: true exit_code: 0 ----- stdout ----- @@ -263,7 +263,239 @@ fn package() -> Result<()> { Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 - "###); + "); + + Ok(()) +} + +/// Test json output +#[test] +fn sync_json() -> Result<()> { + let context = TestContext::new("3.12") + .with_filtered_python_names() + .with_filtered_virtualenv_bin(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig"] + "#, + )?; + + uv_snapshot!(context.filters(), context.sync() + .arg("--output-format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "project", + "project": { + "path": "[TEMP_DIR]/", + "workspace": { + "path": "[TEMP_DIR]/" + } + }, + "sync": { + "environment": { + "path": "[VENV]/", + "python": { + "path": "[VENV]/[BIN]/python", + "version": "3.12.[X]", + "implementation": "cpython" + } + }, + "action": "check" + }, + "lock": { + "path": "[TEMP_DIR]/uv.lock", + "action": "create" + }, + "dry_run": false + } + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "#); + + assert!(context.temp_dir.child("uv.lock").exists()); + + uv_snapshot!(context.filters(), context.sync() + .arg("--frozen") + .arg("--output-format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "project", + "project": { + 
"path": "[TEMP_DIR]/", + "workspace": { + "path": "[TEMP_DIR]/" + } + }, + "sync": { + "environment": { + "path": "[VENV]/", + "python": { + "path": "[VENV]/[BIN]/python", + "version": "3.12.[X]", + "implementation": "cpython" + } + }, + "action": "check" + }, + "lock": { + "path": "[TEMP_DIR]/uv.lock", + "action": "use" + }, + "dry_run": false + } + + ----- stderr ----- + Audited 1 package in [TIME] + "#); + + uv_snapshot!(context.filters(), context.sync() + .arg("--locked") + .arg("--output-format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "project", + "project": { + "path": "[TEMP_DIR]/", + "workspace": { + "path": "[TEMP_DIR]/" + } + }, + "sync": { + "environment": { + "path": "[VENV]/", + "python": { + "path": "[VENV]/[BIN]/python", + "version": "3.12.[X]", + "implementation": "cpython" + } + }, + "action": "check" + }, + "lock": { + "path": "[TEMP_DIR]/uv.lock", + "action": "check" + }, + "dry_run": false + } + + ----- stderr ----- + Resolved 2 packages in [TIME] + Audited 1 package in [TIME] + "#); + + // Invalidate the lockfile by changing the requirements. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig<2"] + "#, + )?; + + uv_snapshot!(context.filters(), context.sync() + .arg("--locked") + .arg("--output-format").arg("json"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. 
+ "); + + Ok(()) +} + +/// Test --dry json output +#[test] +fn sync_dry_json() -> Result<()> { + let context = TestContext::new_with_versions(&["3.12"]) + .with_filtered_python_names() + .with_filtered_virtualenv_bin(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig"] + "#, + )?; + + // Running `uv sync` should report intent to create the environment and lockfile + uv_snapshot!(context.filters(), context.sync() + .arg("--output-format").arg("json") + .arg("--dry-run"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "project", + "project": { + "path": "[TEMP_DIR]/", + "workspace": { + "path": "[TEMP_DIR]/" + } + }, + "sync": { + "environment": { + "path": "[VENV]/", + "python": { + "path": "[VENV]/[BIN]/python", + "version": "3.12.[X]", + "implementation": "cpython" + } + }, + "action": "create" + }, + "lock": { + "path": "[TEMP_DIR]/uv.lock", + "action": "create" + }, + "dry_run": true + } + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Resolved 2 packages in [TIME] + Would download 1 package + Would install 1 package + + iniconfig==2.0.0 + "#); Ok(()) } @@ -322,7 +554,7 @@ fn mixed_requires_python() -> Result<()> { )?; // Running `uv sync` should succeed, locking for Python 3.12. - uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r" success: true exit_code: 0 ----- stdout ----- @@ -337,7 +569,7 @@ fn mixed_requires_python() -> Result<()> { + bird-feeder==0.1.0 (from file://[TEMP_DIR]/packages/bird-feeder) + idna==3.6 + sniffio==1.3.1 - "###); + "); // Running `uv sync` again should fail. 
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.9"), @r" @@ -660,23 +892,23 @@ fn check() -> Result<()> { )?; // Running `uv sync --check` should fail. - uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--check"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv + Would use project environment at: .venv Resolved 2 packages in [TIME] Would create lockfile at: uv.lock Would download 1 package Would install 1 package + iniconfig==2.0.0 error: The environment is outdated; run `uv sync` to update the environment - "###); + "); // Sync the environment. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -686,23 +918,23 @@ fn check() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); // Running `uv sync --check` should pass now that the environment is up to date. - uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--check"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv + Would use project environment at: .venv Resolved 2 packages in [TIME] Found up-to-date lockfile at: uv.lock Audited 1 package in [TIME] Would make no changes - "###); + "); Ok(()) } @@ -750,7 +982,7 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> { .touch()?; // Syncing with `--no-dev` should omit all dependencies except `iniconfig`. 
- uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -761,11 +993,11 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> { Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 - "###); + "); // Syncing without `--no-dev` should include `anyio`, `requests`, `pysocks`, and their // dependencies, but not `typing-extensions`. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -782,7 +1014,7 @@ fn sync_legacy_non_project_dev_dependencies() -> Result<()> { + requests==2.31.0 + sniffio==1.3.1 + urllib3==2.2.1 - "###); + "); Ok(()) } @@ -830,7 +1062,7 @@ fn sync_legacy_non_project_frozen() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--package").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--package").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -839,9 +1071,9 @@ fn sync_legacy_non_project_frozen() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -850,7 +1082,7 @@ fn sync_legacy_non_project_frozen() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -903,7 +1135,7 @@ fn sync_legacy_non_project_group() -> Result<()> { .child("__init__.py") .touch()?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -914,9 
+1146,9 @@ fn sync_legacy_non_project_group() -> Result<()> { Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -928,9 +1160,9 @@ fn sync_legacy_non_project_group() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -946,9 +1178,9 @@ fn sync_legacy_non_project_group() -> Result<()> { - iniconfig==2.0.0 - sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r" success: true exit_code: 0 ----- stdout ----- @@ -958,9 +1190,9 @@ fn sync_legacy_non_project_group() -> Result<()> { Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bop"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bop"), @r" success: false exit_code: 2 ----- stdout ----- @@ -968,7 +1200,7 @@ fn sync_legacy_non_project_group() -> Result<()> { ----- stderr ----- Resolved 6 packages in [TIME] error: Group `bop` is not defined in any project's `dependency-groups` table - "###); + "); Ok(()) } @@ -993,7 +1225,7 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r###" + uv_snapshot!(context.filters(), 
context.sync().arg("--frozen").arg("--group").arg("async"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1004,7 +1236,7 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Modify the "live" dependency groups. pyproject_toml.write_str( @@ -1018,14 +1250,14 @@ fn sync_legacy_non_project_frozen_modification() -> Result<()> { )?; // This should succeed. - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("async"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Audited 3 packages in [TIME] - "###); + "); Ok(()) } @@ -1074,7 +1306,7 @@ fn sync_build_isolation() -> Result<()> { "###); // Running `uv sync` should succeed. - uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1092,7 +1324,7 @@ fn sync_build_isolation() -> Result<()> { + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) - trove-classifiers==2024.3.3 - wheel==0.43.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -1122,7 +1354,7 @@ fn sync_build_isolation_package() -> Result<()> { )?; // Running `uv sync` should fail for iniconfig. - uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -1140,7 +1372,7 @@ fn sync_build_isolation_package() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. 
help: `source-distribution` was included because `project` (v0.1.0) depends on `source-distribution` - "###); + "#); // Install `hatchling` for `source-distribution`. uv_snapshot!(context.filters(), context.pip_install().arg("hatchling"), @r###" @@ -1160,7 +1392,7 @@ fn sync_build_isolation_package() -> Result<()> { "###); // Running `uv sync` should succeed. - uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1177,7 +1409,7 @@ fn sync_build_isolation_package() -> Result<()> { + project==0.1.0 (from file://[TEMP_DIR]/) + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) - trove-classifiers==2024.3.3 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -1212,7 +1444,7 @@ fn sync_build_isolation_extra() -> Result<()> { )?; // Running `uv sync` should fail for the `compile` extra. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -1230,10 +1462,10 @@ fn sync_build_isolation_extra() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. help: `source-distribution` was included because `project[compile]` (v0.1.0) depends on `source-distribution` - "###); + "#); // Running `uv sync` with `--all-extras` should also fail. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -1251,10 +1483,10 @@ fn sync_build_isolation_extra() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. help: `source-distribution` was included because `project[compile]` (v0.1.0) depends on `source-distribution` - "###); + "#); // Install the build dependencies. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("build"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("build"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1269,10 +1501,10 @@ fn sync_build_isolation_extra() -> Result<()> { + pluggy==1.4.0 + project==0.1.0 (from file://[TEMP_DIR]/) + trove-classifiers==2024.3.3 - "###); + "); // Running `uv sync` for the `compile` extra should succeed, and remove the build dependencies. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1288,7 +1520,7 @@ fn sync_build_isolation_extra() -> Result<()> { - pluggy==1.4.0 + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) - trove-classifiers==2024.3.3 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -1342,7 +1574,7 @@ fn sync_reset_state() -> Result<()> { init.touch()?; // Running `uv sync` should succeed. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1354,7 +1586,7 @@ fn sync_reset_state() -> Result<()> { + project==0.1.0 (from file://[TEMP_DIR]/) + pydantic-core==2.17.0 + typing-extensions==4.10.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -1396,7 +1628,7 @@ fn sync_relative_wheel() -> Result<()> { context.temp_dir.join("wheels/ok-1.0.0-py3-none-any.whl"), )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1407,7 +1639,7 @@ fn sync_relative_wheel() -> Result<()> { Installed 2 packages in [TIME] + ok==1.0.0 (from file://[TEMP_DIR]/wheels/ok-1.0.0-py3-none-any.whl) + relative-wheel==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); let lock = context.read("uv.lock"); @@ -1449,7 +1681,7 @@ fn sync_relative_wheel() -> Result<()> { ); // Check that we can re-read the lockfile. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1457,7 +1689,7 @@ fn sync_relative_wheel() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 2 packages in [TIME] - "###); + "); Ok(()) } @@ -1481,7 +1713,7 @@ fn sync_environment() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 2 ----- stdout ----- @@ -1489,7 +1721,7 @@ fn sync_environment() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] error: The current Python platform is not compatible with the lockfile's supported environments: `python_full_version < '3.11'` - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -1516,7 +1748,7 @@ fn sync_dev() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--only-dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1528,9 +1760,9 @@ fn sync_dev() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1544,9 +1776,9 @@ fn sync_dev() -> Result<()> { - idna==3.6 - sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1557,10 +1789,10 @@ fn sync_dev() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Using `--no-default-groups` should remove dev dependencies - uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###" + 
uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1571,7 +1803,7 @@ fn sync_dev() -> Result<()> { - anyio==4.3.0 - idna==3.6 - sniffio==1.3.1 - "###); + "); Ok(()) } @@ -1600,7 +1832,7 @@ fn sync_group() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1611,9 +1843,9 @@ fn sync_group() -> Result<()> { Installed 2 packages in [TIME] + iniconfig==2.0.0 + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1625,9 +1857,9 @@ fn sync_group() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1645,9 +1877,9 @@ fn sync_group() -> Result<()> { - sniffio==1.3.1 - typing-extensions==4.10.0 + urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1659,9 +1891,9 @@ fn sync_group() -> Result<()> { + iniconfig==2.0.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1669,9 +1901,9 @@ fn sync_group() -> Result<()> { ----- stderr ----- Resolved 10 packages in 
[TIME] Audited 9 packages in [TIME] - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1683,9 +1915,9 @@ fn sync_group() -> Result<()> { - charset-normalizer==3.3.2 - requests==2.31.0 - urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups").arg("--no-dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1699,9 +1931,9 @@ fn sync_group() -> Result<()> { - iniconfig==2.0.0 + requests==2.31.0 + urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1718,9 +1950,9 @@ fn sync_group() -> Result<()> { - requests==2.31.0 - sniffio==1.3.1 - urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--dev").arg("--no-group").arg("dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dev").arg("--no-group").arg("dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1729,9 +1961,9 @@ fn sync_group() -> Result<()> { Resolved 10 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--no-dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--no-dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1739,9 +1971,9 @@ fn sync_group() -> Result<()> { ----- stderr ----- Resolved 10 packages in [TIME] Audited 1 package in [TIME] - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), 
context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1757,10 +1989,10 @@ fn sync_group() -> Result<()> { + requests==2.31.0 + sniffio==1.3.1 + urllib3==2.2.1 - "###); + "); // Using `--no-default-groups` should exclude all groups - uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1776,9 +2008,9 @@ fn sync_group() -> Result<()> { - requests==2.31.0 - sniffio==1.3.1 - urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1794,11 +2026,11 @@ fn sync_group() -> Result<()> { + requests==2.31.0 + sniffio==1.3.1 + urllib3==2.2.1 - "###); + "); // Using `--no-default-groups` with `--group foo` and `--group bar` should include those groups, // excluding the remaining `dev` group. 
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1807,7 +2039,7 @@ fn sync_group() -> Result<()> { Resolved 10 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -1833,7 +2065,7 @@ fn sync_include_group() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -1843,9 +2075,9 @@ fn sync_include_group() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1858,9 +2090,9 @@ fn sync_include_group() -> Result<()> { + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1872,9 +2104,9 @@ fn sync_include_group() -> Result<()> { - idna==3.6 - sniffio==1.3.1 - typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1886,9 +2118,9 @@ fn sync_include_group() -> Result<()> { + idna==3.6 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); - 
uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1897,9 +2129,9 @@ fn sync_include_group() -> Result<()> { Resolved 6 packages in [TIME] Uninstalled 1 package in [TIME] - typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1908,9 +2140,9 @@ fn sync_include_group() -> Result<()> { Resolved 6 packages in [TIME] Installed 1 package in [TIME] + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1922,9 +2154,9 @@ fn sync_include_group() -> Result<()> { - idna==3.6 - iniconfig==2.0.0 - sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1936,9 +2168,9 @@ fn sync_include_group() -> Result<()> { + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1946,7 +2178,7 @@ fn sync_include_group() -> Result<()> { ----- stderr ----- Resolved 6 packages in [TIME] Audited 5 packages in [TIME] - "###); + "); Ok(()) } @@ -1972,7 +2204,7 @@ fn sync_exclude_group() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), 
context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1986,9 +2218,9 @@ fn sync_exclude_group() -> Result<()> { + iniconfig==2.0.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--no-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--no-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2000,9 +2232,9 @@ fn sync_exclude_group() -> Result<()> { - idna==3.6 - iniconfig==2.0.0 - sniffio==1.3.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2013,9 +2245,9 @@ fn sync_exclude_group() -> Result<()> { Installed 1 package in [TIME] + iniconfig==2.0.0 - typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar").arg("--no-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar").arg("--no-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2024,7 +2256,7 @@ fn sync_exclude_group() -> Result<()> { Resolved 6 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -2052,7 +2284,7 @@ fn sync_dev_group() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -2066,7 +2298,7 @@ fn sync_dev_group() -> Result<()> { + iniconfig==2.0.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -2093,7 +2325,7 @@ fn sync_non_existent_group() -> Result<()> { 
context.lock().assert().success(); // Requesting a non-existent group should fail. - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -2101,9 +2333,9 @@ fn sync_non_existent_group() -> Result<()> { ----- stderr ----- Resolved 7 packages in [TIME] error: Group `baz` is not defined in the project's `dependency-groups` table - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -2111,10 +2343,10 @@ fn sync_non_existent_group() -> Result<()> { ----- stderr ----- Resolved 7 packages in [TIME] error: Group `baz` is not defined in the project's `dependency-groups` table - "###); + "); // Requesting an empty group should succeed. - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2124,11 +2356,11 @@ fn sync_non_existent_group() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + typing-extensions==4.10.0 - "###); + "); // Requesting with `--frozen` should respect the groups in the lockfile, rather than the // `pyproject.toml`. - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2141,7 +2373,7 @@ fn sync_non_existent_group() -> Result<()> { + idna==3.6 + requests==2.31.0 + urllib3==2.2.1 - "###); + "); // Replace `bar` with `baz`. 
pyproject_toml.write_str( @@ -2157,23 +2389,23 @@ fn sync_non_existent_group() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Audited 6 packages in [TIME] - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--group").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Group `baz` is not defined in the project's `dependency-groups` table - "###); + "); Ok(()) } @@ -2453,7 +2685,7 @@ fn sync_default_groups() -> Result<()> { context.lock().assert().success(); // The `dev` group should be synced by default. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -2464,7 +2696,7 @@ fn sync_default_groups() -> Result<()> { Installed 2 packages in [TIME] + iniconfig==2.0.0 + typing-extensions==4.10.0 - "###); + "); // If we remove it from the `default-groups` list, it should be removed. pyproject_toml.write_str( @@ -2485,7 +2717,7 @@ fn sync_default_groups() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -2494,7 +2726,7 @@ fn sync_default_groups() -> Result<()> { Resolved 10 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); // If we set a different default group, it should be synced instead. 
pyproject_toml.write_str( @@ -2515,7 +2747,7 @@ fn sync_default_groups() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -2527,7 +2759,7 @@ fn sync_default_groups() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // `--no-group` should remove from the defaults. pyproject_toml.write_str( @@ -2548,7 +2780,7 @@ fn sync_default_groups() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2559,10 +2791,10 @@ fn sync_default_groups() -> Result<()> { - anyio==4.3.0 - idna==3.6 - sniffio==1.3.1 - "###); + "); // Using `--group` should include the defaults - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2574,10 +2806,10 @@ fn sync_default_groups() -> Result<()> { + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); // Using `--all-groups` should include the defaults - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2590,10 +2822,10 @@ fn sync_default_groups() -> Result<()> { + charset-normalizer==3.3.2 + requests==2.31.0 + urllib3==2.2.1 - "###); + "); // Using `--only-group` should exclude the defaults - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2609,9 +2841,9 @@ fn sync_default_groups() -> Result<()> { - sniffio==1.3.1 - 
typing-extensions==4.10.0 - urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2627,10 +2859,10 @@ fn sync_default_groups() -> Result<()> { + sniffio==1.3.1 + typing-extensions==4.10.0 + urllib3==2.2.1 - "###); + "); // Using `--no-default-groups` should exclude all groups - uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2646,9 +2878,9 @@ fn sync_default_groups() -> Result<()> { - requests==2.31.0 - sniffio==1.3.1 - urllib3==2.2.1 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2664,11 +2896,11 @@ fn sync_default_groups() -> Result<()> { + requests==2.31.0 + sniffio==1.3.1 + urllib3==2.2.1 - "###); + "); // Using `--no-default-groups` with `--group foo` and `--group bar` should include those groups, // excluding the remaining `dev` group. 
- uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-default-groups").arg("--group").arg("foo").arg("--group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2677,7 +2909,7 @@ fn sync_default_groups() -> Result<()> { Resolved 10 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -2749,7 +2981,7 @@ fn sync_default_groups_all() -> Result<()> { "); // Using `--all-groups` should be redundant and work fine - uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-groups"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2765,7 +2997,7 @@ fn sync_default_groups_all() -> Result<()> { + requests==2.31.0 + sniffio==1.3.1 + urllib3==2.2.1 - "###); + "); // Using `--no-dev` should exclude just the dev group uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r" @@ -2900,7 +3132,7 @@ fn sync_group_member() -> Result<()> { // Generate a lockfile. 
context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -2912,7 +3144,7 @@ fn sync_group_member() -> Result<()> { + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -3023,7 +3255,7 @@ fn sync_group_legacy_non_project_member() -> Result<()> { ); }); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3035,7 +3267,7 @@ fn sync_group_legacy_non_project_member() -> Result<()> { + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -3157,7 +3389,7 @@ fn sync_group_self() -> Result<()> { ); }); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3169,9 +3401,9 @@ fn sync_group_self() -> Result<()> { + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/) + typing-extensions==4.10.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--only-group").arg("bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3183,7 +3415,7 @@ fn sync_group_self() -> Result<()> { Installed 1 package in [TIME] + idna==3.6 - typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -3208,7 +3440,7 @@ fn sync_non_existent_extra() -> Result<()> { context.lock().assert().success(); // Requesting a non-existent extra should fail. 
- uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3216,10 +3448,10 @@ fn sync_non_existent_extra() -> Result<()> { ----- stderr ----- Resolved 4 packages in [TIME] error: Extra `baz` is not defined in the project's `optional-dependencies` table - "###); + "); // Excluding a non-existing extra when requesting all extras should fail. - uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3227,7 +3459,7 @@ fn sync_non_existent_extra() -> Result<()> { ----- stderr ----- Resolved 4 packages in [TIME] error: Extra `baz` is not defined in the project's `optional-dependencies` table - "###); + "); Ok(()) } @@ -3249,7 +3481,7 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> { context.lock().assert().success(); // Requesting a non-existent extra should fail. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3257,10 +3489,10 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> { ----- stderr ----- Resolved 1 package in [TIME] error: Extra `baz` is not defined in the project's `optional-dependencies` table - "###); + "); // Excluding a non-existing extra when requesting all extras should fail. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-extras").arg("--no-extra").arg("baz"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3268,7 +3500,7 @@ fn sync_non_existent_extra_no_optional_dependencies() -> Result<()> { ----- stderr ----- Resolved 1 package in [TIME] error: Extra `baz` is not defined in the project's `optional-dependencies` table - "###); + "); Ok(()) } @@ -3321,14 +3553,14 @@ fn sync_ignore_extras_check_when_no_provides_extras() -> Result<()> { "#})?; // Requesting a non-existent extra should not fail, as no validation should be performed. - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra").arg("baz"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Audited in [TIME] - "###); + "); Ok(()) } @@ -3376,7 +3608,7 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> { context.lock().assert().success(); // Requesting an extra that only exists in the child should fail. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3384,10 +3616,10 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> { ----- stderr ----- Resolved 5 packages in [TIME] error: Extra `async` is not defined in the project's `optional-dependencies` table - "###); + "); // Unless we sync from the child directory. 
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3399,7 +3631,7 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -3449,7 +3681,7 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> { // Requesting an extra that only exists in the child should succeed, since we sync all members // by default. - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("async"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3461,10 +3693,10 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Syncing from the child should also succeed. - uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--extra").arg("async"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3472,10 +3704,10 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> { ----- stderr ----- Resolved 5 packages in [TIME] Audited 3 packages in [TIME] - "###); + "); // Syncing from an unrelated child should fail. 
- uv_snapshot!(context.filters(), context.sync().arg("--package").arg("other").arg("--extra").arg("async"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("other").arg("--extra").arg("async"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3483,7 +3715,7 @@ fn sync_non_existent_extra_non_project_workspace() -> Result<()> { ----- stderr ----- Resolved 5 packages in [TIME] error: Extra `async` is not defined in the project's `optional-dependencies` table - "###); + "); Ok(()) } @@ -3551,7 +3783,7 @@ fn no_install_project() -> Result<()> { context.lock().assert().success(); // Running with `--no-install-project` should install `anyio`, but not `project`. - uv_snapshot!(context.filters(), context.sync().arg("--no-install-project"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-project"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3563,7 +3795,7 @@ fn no_install_project() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // However, we do require the `pyproject.toml`. fs_err::remove_file(pyproject_toml)?; @@ -3633,7 +3865,7 @@ fn no_install_workspace() -> Result<()> { // Running with `--no-install-workspace` should install `anyio` and `iniconfig`, but not // `project` or `child`. - uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3646,7 +3878,7 @@ fn no_install_workspace() -> Result<()> { + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); // Remove the virtual environment. fs_err::remove_dir_all(&context.venv)?; @@ -3654,7 +3886,7 @@ fn no_install_workspace() -> Result<()> { // We don't require the `pyproject.toml` for non-root members, if `--frozen` is provided. 
fs_err::remove_file(child.join("pyproject.toml"))?; - uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace").arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-workspace").arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3667,10 +3899,10 @@ fn no_install_workspace() -> Result<()> { + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); // Even if `--package` is used. - uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--no-install-workspace").arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").arg("--no-install-workspace").arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3680,20 +3912,20 @@ fn no_install_workspace() -> Result<()> { - anyio==3.7.0 - idna==3.6 - sniffio==1.3.1 - "###); + "); // Unless the package doesn't exist. - uv_snapshot!(context.filters(), context.sync().arg("--package").arg("fake").arg("--no-install-workspace").arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("fake").arg("--no-install-workspace").arg("--frozen"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Could not find root package `fake` - "###); + "); // Even if `--all-packages` is used. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--no-install-workspace").arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--no-install-workspace").arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3703,7 +3935,7 @@ fn no_install_workspace() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // But we do require the root `pyproject.toml`. 
fs_err::remove_file(context.temp_dir.join("pyproject.toml"))?; @@ -3744,7 +3976,7 @@ fn no_install_package() -> Result<()> { context.lock().assert().success(); // Running with `--no-install-package anyio` should skip anyio but include everything else - uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("anyio"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("anyio"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3756,11 +3988,11 @@ fn no_install_package() -> Result<()> { + idna==3.6 + project==0.1.0 (from file://[TEMP_DIR]/) + sniffio==1.3.1 - "###); + "); // Running with `--no-install-package project` should skip the project itself (not as a special // case, that's just the name of the project) - uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("project"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-package").arg("project"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3772,7 +4004,7 @@ fn no_install_package() -> Result<()> { Installed 1 package in [TIME] + anyio==3.7.0 - project==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); Ok(()) } @@ -3801,7 +4033,7 @@ fn no_install_project_no_build() -> Result<()> { context.lock().assert().success(); // `--no-build` should raise an error, since we try to install the project. - uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r" success: false exit_code: 2 ----- stdout ----- @@ -3809,11 +4041,11 @@ fn no_install_project_no_build() -> Result<()> { ----- stderr ----- Resolved 4 packages in [TIME] error: Distribution `project==0.1.0 @ editable+.` can't be installed because it is marked as `--no-build` but has no binary distribution - "###); + "); // But it's fine to combine `--no-install-project` with `--no-build`. We shouldn't error, since // we aren't building the project. 
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build").arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build").arg("--locked"), @r" success: true exit_code: 0 ----- stdout ----- @@ -3825,7 +4057,7 @@ fn no_install_project_no_build() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -3978,7 +4210,7 @@ fn convert_to_virtual() -> Result<()> { )?; // Running `uv sync` should install the project itself. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -3989,7 +4221,7 @@ fn convert_to_virtual() -> Result<()> { Installed 2 packages in [TIME] + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); let lock = context.read("uv.lock"); @@ -4040,7 +4272,7 @@ fn convert_to_virtual() -> Result<()> { )?; // Running `uv sync` should remove the project itself. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4049,7 +4281,7 @@ fn convert_to_virtual() -> Result<()> { Resolved 2 packages in [TIME] Uninstalled 1 package in [TIME] - project==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); let lock = context.read("uv.lock"); @@ -4108,7 +4340,7 @@ fn convert_to_package() -> Result<()> { )?; // Running `uv sync` should not install the project itself. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4118,7 +4350,7 @@ fn convert_to_package() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); let lock = context.read("uv.lock"); @@ -4173,7 +4405,7 @@ fn convert_to_package() -> Result<()> { )?; // Running `uv sync` should install the project itself. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4183,7 +4415,7 @@ fn convert_to_package() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + project==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); let lock = context.read("uv.lock"); @@ -4243,7 +4475,7 @@ fn sync_custom_environment_path() -> Result<()> { )?; // Running `uv sync` should create `.venv` by default - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4255,7 +4487,7 @@ fn sync_custom_environment_path() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4263,7 +4495,7 @@ fn sync_custom_environment_path() -> Result<()> { .assert(predicate::path::is_dir()); // Running `uv sync` should create `foo` in the project directory when customized - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4274,7 +4506,7 @@ fn sync_custom_environment_path() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4288,7 +4520,7 @@ fn sync_custom_environment_path() -> Result<()> { .assert(predicate::path::is_dir()); // An absolute path can be provided - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foobar/.venv"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foobar/.venv"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4299,7 +4531,7 @@ fn sync_custom_environment_path() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] 
+ iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4313,7 +4545,7 @@ fn sync_custom_environment_path() -> Result<()> { .assert(predicate::path::is_dir()); // An absolute path can be provided - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, context.temp_dir.join("bar")), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, context.temp_dir.join("bar")), @r" success: true exit_code: 0 ----- stdout ----- @@ -4324,7 +4556,7 @@ fn sync_custom_environment_path() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4334,7 +4566,7 @@ fn sync_custom_environment_path() -> Result<()> { // And, it can be outside the project let tempdir = tempdir_in(TestContext::test_bucket_dir())?; context = context.with_filtered_path(tempdir.path(), "OTHER_TEMPDIR"); - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, tempdir.path().join(".venv")), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, tempdir.path().join(".venv")), @r" success: true exit_code: 0 ----- stdout ----- @@ -4345,7 +4577,7 @@ fn sync_custom_environment_path() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); ChildPath::new(tempdir.path()) .child(".venv") @@ -4382,7 +4614,7 @@ fn sync_custom_environment_path() -> Result<()> { fs_err::write(context.temp_dir.join("foo").join("file"), b"")?; // We can delete and use it - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4394,7 +4626,7 @@ fn sync_custom_environment_path() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); Ok(()) } 
@@ -4417,7 +4649,7 @@ fn sync_active_project_environment() -> Result<()> { )?; // Running `uv sync` with `VIRTUAL_ENV` should warn - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4430,7 +4662,7 @@ fn sync_active_project_environment() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4443,7 +4675,7 @@ fn sync_active_project_environment() -> Result<()> { .assert(predicate::path::missing()); // Using `--active` should create the environment - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4454,7 +4686,7 @@ fn sync_active_project_environment() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4462,7 +4694,7 @@ fn sync_active_project_environment() -> Result<()> { .assert(predicate::path::is_dir()); // A subsequent sync will re-use the environment - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4470,13 +4702,13 @@ fn sync_active_project_environment() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // Setting both the `VIRTUAL_ENV` and `UV_PROJECT_ENVIRONMENT` is fine if they agree uv_snapshot!(context.filters(), context.sync() .arg("--active") .env(EnvVars::VIRTUAL_ENV, "foo") - .env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + 
.env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4484,13 +4716,13 @@ fn sync_active_project_environment() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // If they disagree, we use `VIRTUAL_ENV` because of `--active` uv_snapshot!(context.filters(), context.sync() .arg("--active") .env(EnvVars::VIRTUAL_ENV, "foo") - .env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r###" + .env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4498,7 +4730,7 @@ fn sync_active_project_environment() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); context .temp_dir @@ -4507,7 +4739,7 @@ fn sync_active_project_environment() -> Result<()> { // Requesting another Python version will invalidate the environment uv_snapshot!(context.filters(), context.sync() - .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active").arg("-p").arg("3.12"), @r###" + .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active").arg("-p").arg("3.12"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4519,7 +4751,7 @@ fn sync_active_project_environment() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -4553,7 +4785,7 @@ fn sync_active_script_environment() -> Result<()> { .collect::>(); // Running `uv sync --script` with `VIRTUAL_ENV` should warn - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4567,7 +4799,7 @@ fn sync_active_script_environment() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); context .temp_dir @@ -4575,7 +4807,7 @@ fn sync_active_script_environment() -> Result<()> { 
.assert(predicate::path::missing()); // Using `--active` should create the environment - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4587,7 +4819,7 @@ fn sync_active_script_environment() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); context .temp_dir @@ -4595,7 +4827,7 @@ fn sync_active_script_environment() -> Result<()> { .assert(predicate::path::is_dir()); // A subsequent sync will re-use the environment - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4604,7 +4836,7 @@ fn sync_active_script_environment() -> Result<()> { Using script environment at: foo Resolved 3 packages in [TIME] Audited 3 packages in [TIME] - "###); + "); // Requesting another Python version will invalidate the environment uv_snapshot!(&filters, context.sync() @@ -4613,19 +4845,198 @@ fn sync_active_script_environment() -> Result<()> { .env(EnvVars::VIRTUAL_ENV, "foo") .arg("--active") .arg("-p") - .arg("3.12"), @r###" + .arg("3.12"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Recreating script environment at: foo + Updating script environment at: foo Resolved 3 packages in [TIME] Installed 3 packages in [TIME] + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); + + Ok(()) +} + +#[test] +fn sync_active_script_environment_json() -> Result<()> { + let context = TestContext::new_with_versions(&["3.11", "3.12"]) + .with_filtered_virtualenv_bin() + .with_filtered_python_names(); + + let script = 
context.temp_dir.child("script.py"); + script.write_str(indoc! { r#" + # /// script + # requires-python = ">=3.11" + # dependencies = [ + # "anyio", + # ] + # /// + + import anyio + "# + })?; + + let filters = context + .filters() + .into_iter() + .chain(vec![ + ( + r"environments-v2/script-[a-z0-9]+", + "environments-v2/script-[HASH]", + ), + ("bin/python3", "[PYTHON]"), + ("Scripts/python.exe", "[PYTHON]"), + ]) + .collect::>(); + + // Running `uv sync --script` with `VIRTUAL_ENV` should warn + uv_snapshot!(&filters, context.sync() + .arg("--script").arg("script.py") + .arg("--output-format").arg("json") + .env(EnvVars::VIRTUAL_ENV, "foo"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "script", + "script": { + "path": "[TEMP_DIR]/script.py" + }, + "sync": { + "environment": { + "path": "[CACHE_DIR]/environments-v2/script-[HASH]", + "python": { + "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/python", + "version": "3.11.[X]", + "implementation": "cpython" + } + }, + "action": "create" + }, + "lock": null, + "dry_run": false + } + + ----- stderr ----- + warning: `VIRTUAL_ENV=foo` does not match the script environment path `[CACHE_DIR]/environments-v2/script-[HASH]` and will be ignored; use `--active` to target the active environment instead + Resolved 3 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "#); + + context + .temp_dir + .child("foo") + .assert(predicate::path::missing()); + + // Using `--active` should create the environment + uv_snapshot!(&filters, context.sync() + .arg("--script").arg("script.py") + .arg("--output-format").arg("json") + .env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "script", + "script": { + "path": "[TEMP_DIR]/script.py" + }, + "sync": { + 
"environment": { + "path": "[TEMP_DIR]/foo", + "python": { + "path": "[TEMP_DIR]/foo/[BIN]/python", + "version": "3.11.[X]", + "implementation": "cpython" + } + }, + "action": "create" + }, + "lock": null, + "dry_run": false + } + + ----- stderr ----- + Resolved 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "#); + + context + .temp_dir + .child("foo") + .assert(predicate::path::is_dir()); + + // A subsequent sync will re-use the environment + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").env(EnvVars::VIRTUAL_ENV, "foo").arg("--active"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using script environment at: foo + Resolved 3 packages in [TIME] + Audited 3 packages in [TIME] + "); + + // Requesting another Python version will invalidate the environment + uv_snapshot!(&filters, context.sync() + .arg("--script").arg("script.py") + .arg("--output-format").arg("json") + .env(EnvVars::VIRTUAL_ENV, "foo") + .arg("--active") + .arg("-p") + .arg("3.12"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + { + "schema": { + "version": "preview" + }, + "target": "script", + "script": { + "path": "[TEMP_DIR]/script.py" + }, + "sync": { + "environment": { + "path": "[TEMP_DIR]/foo", + "python": { + "path": "[TEMP_DIR]/foo/[BIN]/python", + "version": "3.12.[X]", + "implementation": "cpython" + } + }, + "action": "update" + }, + "lock": null, + "dry_run": false + } + + ----- stderr ----- + Resolved 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "#); Ok(()) } @@ -4650,7 +5061,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { context.init().arg("child").assert().success(); // Running `uv sync` should create `.venv` in the workspace root - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- 
stdout ----- @@ -4660,7 +5071,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4668,7 +5079,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { .assert(predicate::path::is_dir()); // Similarly, `uv sync` from the child project uses `.venv` in the workspace root - uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.join("child")), @r###" + uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.join("child")), @r" success: true exit_code: 0 ----- stdout ----- @@ -4677,7 +5088,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { Resolved 3 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4691,7 +5102,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { .assert(predicate::path::missing()); // Running `uv sync` should create `foo` in the workspace root when customized - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4702,7 +5113,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { Resolved 3 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4716,7 +5127,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { .assert(predicate::path::is_dir()); // Similarly, `uv sync` from the child project uses `foo` relative to the workspace root - uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(context.temp_dir.join("child")), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(context.temp_dir.join("child")), @r" success: true 
exit_code: 0 ----- stdout ----- @@ -4725,7 +5136,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { Resolved 3 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); context .temp_dir @@ -4739,7 +5150,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { .assert(predicate::path::missing()); // And, `uv sync --package child` uses `foo` relative to the workspace root - uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4747,7 +5158,7 @@ fn sync_workspace_custom_environment_path() -> Result<()> { ----- stderr ----- Resolved 3 packages in [TIME] Audited in [TIME] - "###); + "); context .temp_dir @@ -4782,7 +5193,7 @@ fn sync_empty_virtual_environment() -> Result<()> { )?; // Running `uv sync` should work - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4794,7 +5205,7 @@ fn sync_empty_virtual_environment() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -4816,7 +5227,7 @@ fn sync_legacy_non_project_warning() -> Result<()> { )?; // We should not warn if it matches the project environment - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join(".venv")), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join(".venv")), @r" success: true exit_code: 0 ----- stdout ----- @@ -4826,10 +5237,10 @@ fn sync_legacy_non_project_warning() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Including if it's a relative path that matches - 
uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, ".venv"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, ".venv"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4837,7 +5248,7 @@ fn sync_legacy_non_project_warning() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // Or, if it's a link that resolves to the same path #[cfg(unix)] @@ -4847,7 +5258,7 @@ fn sync_legacy_non_project_warning() -> Result<()> { let link = context.temp_dir.join("link"); symlink(context.temp_dir.join(".venv"), &link)?; - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, link), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, link), @r" success: true exit_code: 0 ----- stdout ----- @@ -4855,11 +5266,11 @@ fn sync_legacy_non_project_warning() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); } // But we should warn if it's a different path - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4868,10 +5279,10 @@ fn sync_legacy_non_project_warning() -> Result<()> { warning: `VIRTUAL_ENV=foo` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // Including absolute paths - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")), @r" success: true exit_code: 0 ----- stdout ----- @@ -4880,10 +5291,10 @@ fn sync_legacy_non_project_warning() -> Result<()> { warning: `VIRTUAL_ENV=foo` does 
not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // We should not warn if the project environment has been customized and matches - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4894,10 +5305,10 @@ fn sync_legacy_non_project_warning() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // But we should warn if they don't match still - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "bar"), @r" success: true exit_code: 0 ----- stdout ----- @@ -4909,14 +5320,14 @@ fn sync_legacy_non_project_warning() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); let child = context.temp_dir.child("child"); child.create_dir_all()?; // And `VIRTUAL_ENV` is resolved relative to the project root so with relative paths we should // warn from a child too - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo").env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r" success: true exit_code: 0 ----- stdout ----- @@ -4925,10 +5336,10 @@ fn sync_legacy_non_project_warning() -> Result<()> { warning: `VIRTUAL_ENV=foo` does not match the project environment path `[TEMP_DIR]/foo` and 
will be ignored; use `--active` to target the active environment instead Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); // But, a matching absolute path shouldn't warn - uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")).env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r###" + uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, context.temp_dir.join("foo")).env(EnvVars::UV_PROJECT_ENVIRONMENT, "foo").current_dir(&child), @r" success: true exit_code: 0 ----- stdout ----- @@ -4936,7 +5347,7 @@ fn sync_legacy_non_project_warning() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Audited 1 package in [TIME] - "###); + "); Ok(()) } @@ -4956,7 +5367,7 @@ fn sync_update_project() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4968,7 +5379,7 @@ fn sync_update_project() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Bump the project version. 
pyproject_toml.write_str( @@ -4985,7 +5396,7 @@ fn sync_update_project() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -4995,7 +5406,7 @@ fn sync_update_project() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + my-project==0.2.0 (from file://[TEMP_DIR]/) - "###); + "); Ok(()) } @@ -5016,7 +5427,7 @@ fn sync_environment_prompt() -> Result<()> { )?; // Running `uv sync` should create `.venv` - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5028,7 +5439,7 @@ fn sync_environment_prompt() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // The `pyvenv.cfg` should contain the prompt matching the project name let pyvenv_cfg = context.read(".venv/pyvenv.cfg"); @@ -5055,7 +5466,7 @@ fn no_binary() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("iniconfig"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("iniconfig"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5065,11 +5476,11 @@ fn no_binary() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); - uv_snapshot!(context.filters(), context.sync().arg("--reinstall").arg("--no-binary"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--reinstall").arg("--no-binary"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5080,9 +5491,9 @@ fn no_binary() -> Result<()> { Uninstalled 1 package in [TIME] Installed 1 package in [TIME] ~ iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), 
context.sync().arg("--reinstall").env("UV_NO_BINARY_PACKAGE", "iniconfig"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY_PACKAGE", "iniconfig"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5093,9 +5504,9 @@ fn no_binary() -> Result<()> { Uninstalled 1 package in [TIME] Installed 1 package in [TIME] ~ iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "1"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "1"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5106,7 +5517,7 @@ fn no_binary() -> Result<()> { Uninstalled 1 package in [TIME] Installed 1 package in [TIME] ~ iniconfig==2.0.0 - "###); + "); uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BINARY", "iniconfig"), @r###" success: false @@ -5139,7 +5550,7 @@ fn no_binary_error() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("odrive"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("odrive"), @r" success: false exit_code: 2 ----- stdout ----- @@ -5147,7 +5558,7 @@ fn no_binary_error() -> Result<()> { ----- stderr ----- Resolved 31 packages in [TIME] error: Distribution `odrive==0.6.8 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-binary` but has no source distribution - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); @@ -5171,7 +5582,7 @@ fn no_build() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("iniconfig"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("iniconfig"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5181,11 +5592,11 @@ fn no_build() -> Result<()> { Prepared 1 package in [TIME] 
Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); - uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "iniconfig"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "iniconfig"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5196,7 +5607,7 @@ fn no_build() -> Result<()> { Uninstalled 1 package in [TIME] Installed 1 package in [TIME] ~ iniconfig==2.0.0 - "###); + "); Ok(()) } @@ -5218,7 +5629,7 @@ fn no_build_error() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r" success: false exit_code: 2 ----- stdout ----- @@ -5226,7 +5637,7 @@ fn no_build_error() -> Result<()> { ----- stderr ----- Resolved 19 packages in [TIME] error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution - "###); + "); uv_snapshot!(context.filters(), context.sync().arg("--no-build"), @r" success: false @@ -5248,7 +5659,7 @@ fn no_build_error() -> Result<()> { error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution "); - uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "django-allauth"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD_PACKAGE", "django-allauth"), @r" success: false exit_code: 2 ----- stdout ----- @@ -5256,7 +5667,7 @@ fn no_build_error() -> Result<()> { ----- stderr ----- Resolved 19 packages in [TIME] error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed 
because it is marked as `--no-build` but has no binary distribution - "###); + "); uv_snapshot!(context.filters(), context.sync().arg("--reinstall").env("UV_NO_BUILD", "django-allauth"), @r###" success: false @@ -5300,7 +5711,7 @@ fn sync_wheel_url_source_error() -> Result<()> { Resolved 3 packages in [TIME] "###); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 2 ----- stdout ----- @@ -5310,7 +5721,7 @@ fn sync_wheel_url_source_error() -> Result<()> { error: Distribution `cffi==1.17.1 @ direct+https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl` can't be installed because the binary distribution is incompatible with the current platform hint: You're using CPython 3.12 (`cp312`), but `cffi` (v1.17.1) only has wheels with the following Python ABI tag: `cp310` - "###); + "); Ok(()) } @@ -5351,7 +5762,7 @@ fn sync_wheel_path_source_error() -> Result<()> { Resolved 3 packages in [TIME] "###); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 2 ----- stdout ----- @@ -5361,7 +5772,7 @@ fn sync_wheel_path_source_error() -> Result<()> { error: Distribution `cffi==1.17.1 @ path+cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl` can't be installed because the binary distribution is incompatible with the current platform hint: You're using CPython 3.12 (`cp312`), but `cffi` (v1.17.1) only has wheels with the following Python ABI tag: `cp310` - "###); + "); Ok(()) } @@ -5423,7 +5834,7 @@ fn sync_override_package() -> Result<()> { .touch()?; // Syncing the project should _not_ install `core`. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5433,7 +5844,7 @@ fn sync_override_package() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + project==0.0.0 (from file://[TEMP_DIR]/) - "###); + "); // Mark the source as `package = true`. let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -5455,7 +5866,7 @@ fn sync_override_package() -> Result<()> { )?; // Syncing the project _should_ install `core`. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5467,7 +5878,7 @@ fn sync_override_package() -> Result<()> { Installed 2 packages in [TIME] + core==0.1.0 (from file://[TEMP_DIR]/core) ~ project==0.0.0 (from file://[TEMP_DIR]/) - "###); + "); // Remove `package = false`. let pyproject_toml = context.temp_dir.child("core").child("pyproject.toml"); @@ -5485,7 +5896,7 @@ fn sync_override_package() -> Result<()> { )?; // Syncing the project _should_ install `core`. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5496,7 +5907,7 @@ fn sync_override_package() -> Result<()> { Uninstalled 1 package in [TIME] Installed 1 package in [TIME] ~ core==0.1.0 (from file://[TEMP_DIR]/core) - "###); + "); // Mark the source as `package = false`. let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -5518,7 +5929,7 @@ fn sync_override_package() -> Result<()> { )?; // Syncing the project should _not_ install `core`. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5530,7 +5941,7 @@ fn sync_override_package() -> Result<()> { Installed 1 package in [TIME] - core==0.1.0 (from file://[TEMP_DIR]/core) ~ project==0.0.0 (from file://[TEMP_DIR]/) - "###); + "); Ok(()) } @@ -5592,7 +6003,7 @@ fn transitive_dev() -> Result<()> { let init = src.child("__init__.py"); init.touch()?; - uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5605,7 +6016,7 @@ fn transitive_dev() -> Result<()> { + child==0.1.0 (from file://[TEMP_DIR]/child) + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -5665,7 +6076,7 @@ fn sync_no_editable() -> Result<()> { let init = src.child("__init__.py"); init.touch()?; - uv_snapshot!(context.filters(), context.sync().arg("--no-editable"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-editable"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5676,7 +6087,7 @@ fn sync_no_editable() -> Result<()> { Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + root==0.1.0 (from file://[TEMP_DIR]/) - "###); + "); uv_snapshot!(context.filters(), context.sync().env(EnvVars::UV_NO_EDITABLE, "1"), @r" success: true @@ -5731,7 +6142,7 @@ fn sync_scripts_without_build_system() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5740,7 +6151,7 @@ fn sync_scripts_without_build_system() -> Result<()> { warning: Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system` Resolved 1 package in [TIME] Audited in [TIME] - "###); + "); Ok(()) } @@ 
-5780,7 +6191,7 @@ fn sync_scripts_project_not_packaged() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -5789,7 +6200,7 @@ fn sync_scripts_project_not_packaged() -> Result<()> { warning: Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system` Resolved 1 package in [TIME] Audited in [TIME] - "###); + "); Ok(()) } @@ -5822,7 +6233,7 @@ fn sync_dynamic_extra() -> Result<()> { .child("requirements-dev.txt") .write_str("typing-extensions")?; - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5834,7 +6245,7 @@ fn sync_dynamic_extra() -> Result<()> { + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/) + typing-extensions==4.10.0 - "###); + "); let lock = context.read("uv.lock"); @@ -5895,7 +6306,7 @@ fn sync_dynamic_extra() -> Result<()> { ); // Check that we can re-read the lockfile. 
- uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r" success: true exit_code: 0 ----- stdout ----- @@ -5904,7 +6315,7 @@ fn sync_dynamic_extra() -> Result<()> { Resolved 3 packages in [TIME] Uninstalled 1 package in [TIME] - typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -5974,7 +6385,7 @@ fn build_system_requires_workspace() -> Result<()> { ", })?; - uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###" + uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r" success: true exit_code: 0 ----- stdout ----- @@ -5987,7 +6398,7 @@ fn build_system_requires_workspace() -> Result<()> { Installed 2 packages in [TIME] + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/project) - "###); + "); Ok(()) } @@ -6054,7 +6465,7 @@ fn build_system_requires_path() -> Result<()> { ", })?; - uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###" + uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r" success: true exit_code: 0 ----- stdout ----- @@ -6067,7 +6478,7 @@ fn build_system_requires_path() -> Result<()> { Installed 2 packages in [TIME] + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/project) - "###); + "); Ok(()) } @@ -6119,7 +6530,7 @@ fn sync_invalid_environment() -> Result<()> { fs_err::write(context.temp_dir.join(".venv").join("file"), b"")?; // We can delete and use it - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6132,7 +6543,7 @@ fn sync_invalid_environment() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); let bin = venv_bin_path(context.temp_dir.join(".venv")); @@ -6141,7 +6552,7 @@ fn 
sync_invalid_environment() -> Result<()> { { fs_err::remove_file(bin.join("python"))?; fs_err::os::unix::fs::symlink(context.temp_dir.join("does-not-exist"), bin.join("python"))?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6154,7 +6565,7 @@ fn sync_invalid_environment() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); } // But if the Python executable is missing entirely we should also fail @@ -6242,7 +6653,7 @@ fn sync_no_sources_missing_member() -> Result<()> { let init = src.child("__init__.py"); init.touch()?; - uv_snapshot!(context.filters(), context.sync().arg("--no-sources"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-sources"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6254,7 +6665,7 @@ fn sync_no_sources_missing_member() -> Result<()> { + anyio==4.3.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -6273,7 +6684,7 @@ fn sync_python_version() -> Result<()> { "#})?; // We should respect the project's required version, not the first on the path - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6287,7 +6698,7 @@ fn sync_python_version() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Unless explicitly requested... 
uv_snapshot!(context.filters(), context.sync().arg("--python").arg("3.10"), @r" @@ -6310,7 +6721,7 @@ fn sync_python_version() -> Result<()> { ----- stderr ----- "###); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6324,7 +6735,7 @@ fn sync_python_version() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Create a pin that's incompatible with the project uv_snapshot!(context.filters(), context.python_pin().arg("3.10").arg("--no-workspace"), @r###" @@ -6363,7 +6774,7 @@ fn sync_python_version() -> Result<()> { "#}) .unwrap(); - uv_snapshot!(context.filters(), context.sync().current_dir(&child_dir), @r###" + uv_snapshot!(context.filters(), context.sync().current_dir(&child_dir), @r" success: true exit_code: 0 ----- stdout ----- @@ -6376,7 +6787,7 @@ fn sync_python_version() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -6406,7 +6817,7 @@ fn sync_explicit() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6416,13 +6827,13 @@ fn sync_explicit() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + idna==2.7 - "###); + "); // Clear the environment. fs_err::remove_dir_all(&context.venv)?; // The package should be drawn from the cache. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -6433,7 +6844,7 @@ fn sync_explicit() -> Result<()> { Resolved 2 packages in [TIME] Installed 1 package in [TIME] + idna==2.7 - "###); + "); Ok(()) } @@ -6495,7 +6906,7 @@ fn sync_all() -> Result<()> { context.lock().assert().success(); // Sync all workspace members. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6510,7 +6921,7 @@ fn sync_all() -> Result<()> { + iniconfig==2.0.0 + project==0.1.0 (from file://[TEMP_DIR]/) + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -6576,7 +6987,7 @@ fn sync_all_extras() -> Result<()> { context.lock().assert().success(); // Sync an extra that exists in both the parent and child. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6589,10 +7000,10 @@ fn sync_all_extras() -> Result<()> { + iniconfig==2.0.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); // Sync an extra that only exists in the child. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("testing"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("testing"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6605,10 +7016,10 @@ fn sync_all_extras() -> Result<()> { + packaging==24.0 - sniffio==1.3.1 - typing-extensions==4.10.0 - "###); + "); // Sync all extras. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6621,10 +7032,10 @@ fn sync_all_extras() -> Result<()> { + idna==3.6 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); // Sync all extras excluding an extra that exists in both the parent and child. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("types"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("types"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6633,10 +7044,10 @@ fn sync_all_extras() -> Result<()> { Resolved 8 packages in [TIME] Uninstalled 1 package in [TIME] - typing-extensions==4.10.0 - "###); + "); // Sync an extra that doesn't exist. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r" success: false exit_code: 2 ----- stdout ----- @@ -6644,10 +7055,10 @@ fn sync_all_extras() -> Result<()> { ----- stderr ----- Resolved 8 packages in [TIME] error: Extra `foo` is not defined in any project's `optional-dependencies` table - "###); + "); // Sync all extras excluding an extra that doesn't exist. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--all-extras").arg("--no-extra").arg("foo"), @r" success: false exit_code: 2 ----- stdout ----- @@ -6655,7 +7066,7 @@ fn sync_all_extras() -> Result<()> { ----- stderr ----- Resolved 8 packages in [TIME] error: Extra `foo` is not defined in any project's `optional-dependencies` table - "###); + "); Ok(()) } @@ -6731,7 +7142,7 @@ fn sync_all_extras_dynamic() -> Result<()> { context.lock().assert().success(); // Sync an extra that exists in the parent. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("types"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6743,10 +7154,10 @@ fn sync_all_extras_dynamic() -> Result<()> { + child==0.1.0 (from file://[TEMP_DIR]/child) + project==0.1.0 (from file://[TEMP_DIR]/) + sniffio==1.3.1 - "###); + "); // Sync a dynamic extra that exists in the child. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("dev"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("dev"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6758,10 +7169,10 @@ fn sync_all_extras_dynamic() -> Result<()> { Installed 1 package in [TIME] - sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); // Sync a dynamic extra that doesn't exist in the child. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--extra").arg("foo"), @r" success: false exit_code: 2 ----- stdout ----- @@ -6769,7 +7180,7 @@ fn sync_all_extras_dynamic() -> Result<()> { ----- stderr ----- Resolved 6 packages in [TIME] error: Extra `foo` is not defined in any project's `optional-dependencies` table - "###); + "); Ok(()) } @@ -6836,7 +7247,7 @@ fn sync_all_groups() -> Result<()> { context.lock().assert().success(); // Sync a group that exists in both the parent and child. 
- uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("types"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("types"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6849,10 +7260,10 @@ fn sync_all_groups() -> Result<()> { + iniconfig==2.0.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); // Sync a group that only exists in the child. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("testing"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("testing"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6865,10 +7276,10 @@ fn sync_all_groups() -> Result<()> { + packaging==24.0 - sniffio==1.3.1 - typing-extensions==4.10.0 - "###); + "); // Sync a group that doesn't exist. - uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-packages").arg("--group").arg("foo"), @r" success: false exit_code: 2 ----- stdout ----- @@ -6876,10 +7287,10 @@ fn sync_all_groups() -> Result<()> { ----- stderr ----- Resolved 8 packages in [TIME] error: Group `foo` is not defined in any project's `dependency-groups` table - "###); + "); // Sync an empty group. - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("empty"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("empty"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6888,7 +7299,7 @@ fn sync_all_groups() -> Result<()> { Resolved 8 packages in [TIME] Uninstalled 1 package in [TIME] - packaging==24.0 - "###); + "); Ok(()) } @@ -6940,7 +7351,7 @@ fn sync_multiple_sources_index_disjoint_extras() -> Result<()> { // Generate a lockfile. 
context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("cu124"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("cu124"), @r" success: true exit_code: 0 ----- stdout ----- @@ -6951,7 +7362,7 @@ fn sync_multiple_sources_index_disjoint_extras() -> Result<()> { Installed 2 packages in [TIME] + jinja2==3.1.3 + markupsafe==2.1.5 - "###); + "); Ok(()) } @@ -6982,7 +7393,7 @@ fn sync_derivation_chain() -> Result<()> { .chain([(r"/.*/src", "/[TMP]/src")]) .collect::>(); - uv_snapshot!(filters, context.sync(), @r###" + uv_snapshot!(filters, context.sync(), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7013,7 +7424,7 @@ fn sync_derivation_chain() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. help: `wsgiref` (v0.1.2) was included because `project` (v0.1.0) depends on `wsgiref` - "###); + "#); Ok(()) } @@ -7045,7 +7456,7 @@ fn sync_derivation_chain_extra() -> Result<()> { .chain([(r"/.*/src", "/[TMP]/src")]) .collect::>(); - uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###" + uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7076,7 +7487,7 @@ fn sync_derivation_chain_extra() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. help: `wsgiref` (v0.1.2) was included because `project[wsgi]` (v0.1.0) depends on `wsgiref` - "###); + "#); Ok(()) } @@ -7110,7 +7521,7 @@ fn sync_derivation_chain_group() -> Result<()> { .chain([(r"/.*/src", "/[TMP]/src")]) .collect::>(); - uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###" + uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7141,7 +7552,7 @@ fn sync_derivation_chain_group() -> Result<()> { hint: This usually indicates a problem with the package or the build environment. 
help: `wsgiref` (v0.1.2) was included because `project:wsgi` (v0.1.0) depends on `wsgiref` - "###); + "#); Ok(()) } @@ -7235,7 +7646,7 @@ fn sync_stale_egg_info() -> Result<()> { } ); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7247,7 +7658,7 @@ fn sync_stale_egg_info() -> Result<()> { + member==0.1.dev5+gfea1041 (from git+https://github.com/astral-sh/uv-stale-egg-info-test.git@fea10416b9c479ac88fb217e14e40249b63bfbee#subdirectory=member) + root==0.1.dev5+gfea1041 (from git+https://github.com/astral-sh/uv-stale-egg-info-test.git@fea10416b9c479ac88fb217e14e40249b63bfbee) + setuptools==69.2.0 - "###); + "); Ok(()) } @@ -7330,7 +7741,7 @@ fn sync_git_repeated_member_static_metadata() -> Result<()> { } ); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7341,7 +7752,7 @@ fn sync_git_repeated_member_static_metadata() -> Result<()> { Installed 2 packages in [TIME] + uv-git-workspace-in-root==0.1.0 (from git+https://github.com/astral-sh/workspace-in-root-test.git@d3ab48d2338296d47e28dbb2fb327c5e2ac4ac68) + workspace-member-in-subdir==0.1.0 (from git+https://github.com/astral-sh/workspace-in-root-test.git@d3ab48d2338296d47e28dbb2fb327c5e2ac4ac68#subdirectory=workspace-member-in-subdir) - "###); + "); Ok(()) } @@ -7446,7 +7857,7 @@ fn sync_git_repeated_member_dynamic_metadata() -> Result<()> { } ); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7459,7 +7870,7 @@ fn sync_git_repeated_member_dynamic_metadata() -> Result<()> { + iniconfig==2.0.0 + package==0.1.0 (from git+https://github.com/astral-sh/uv-dynamic-metadata-test.git@6c5aa0a65db737c9e7e2e60dc865bd8087012e64) + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -7542,7 +7953,7 
@@ fn sync_git_repeated_member_backwards_path() -> Result<()> { } ); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7553,7 +7964,7 @@ fn sync_git_repeated_member_backwards_path() -> Result<()> { Installed 2 packages in [TIME] + dependency==0.1.0 (from git+https://github.com/astral-sh/uv-backwards-path-test@4bcc7fcd2e548c2ab7ba6b97b1c4e3ababccc7a9#subdirectory=dependency) + package==0.1.0 (from git+https://github.com/astral-sh/uv-backwards-path-test@4bcc7fcd2e548c2ab7ba6b97b1c4e3ababccc7a9#subdirectory=root) - "###); + "); Ok(()) } @@ -7578,7 +7989,7 @@ fn mismatched_name_self_editable() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 1 ----- stdout ----- @@ -7588,7 +7999,7 @@ fn mismatched_name_self_editable() -> Result<()> { × Failed to build `foo @ file://[TEMP_DIR]/` ╰─▶ Package metadata name `project` does not match given name `foo` help: `foo` was included because `project` (v0.1.0) depends on `foo` - "###); + "); Ok(()) } @@ -7610,7 +8021,7 @@ fn mismatched_name_cached_wheel() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7620,7 +8031,7 @@ fn mismatched_name_cached_wheel() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 (from https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz) - "###); + "); pyproject_toml.write_str( r#" @@ -7632,7 +8043,7 @@ fn mismatched_name_cached_wheel() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 1 ----- stdout ----- @@ -7640,7 +8051,7 @@ 
fn mismatched_name_cached_wheel() -> Result<()> { ----- stderr ----- × Failed to download and build `foo @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz` ╰─▶ Package metadata name `iniconfig` does not match given name `foo` - "###); + "); Ok(()) } @@ -7720,7 +8131,7 @@ fn sync_git_path_dependency() -> Result<()> { } ); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7731,7 +8142,7 @@ fn sync_git_path_dependency() -> Result<()> { Installed 2 packages in [TIME] + package1==0.1.0 (from git+https://github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package1) + package2==0.1.0 (from git+https://github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package2) - "###); + "); Ok(()) } @@ -7835,7 +8246,7 @@ fn sync_build_tag() -> Result<()> { "###); // Install from the lockfile. - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -7844,7 +8255,7 @@ fn sync_build_tag() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + build-tag==1.0.0 - "###); + "); // Ensure that we choose the highest build tag (5). uv_snapshot!(context.filters(), context.run().arg("--no-sync").arg("python").arg("-c").arg("import build_tag; build_tag.main()"), @r###" @@ -7904,7 +8315,7 @@ fn url_hash_mismatch() -> Result<()> { "#})?; // Running `uv sync` should fail. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 1 ----- stdout ----- @@ -7920,7 +8331,7 @@ fn url_hash_mismatch() -> Result<()> { Computed: sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 help: `iniconfig` was included because `project` (v0.1.0) depends on `iniconfig` - "###); + "); Ok(()) } @@ -7977,7 +8388,7 @@ fn path_hash_mismatch() -> Result<()> { "#})?; // Running `uv sync` should fail. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 1 ----- stdout ----- @@ -7993,7 +8404,7 @@ fn path_hash_mismatch() -> Result<()> { Computed: sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 help: `iniconfig` was included because `project` (v0.1.0) depends on `iniconfig` - "###); + "); Ok(()) } @@ -8029,7 +8440,7 @@ fn find_links_relative_in_config_works_from_subdir() -> Result<()> { subdir.create_dir_all()?; // Run `uv sync --offline` from subdir. We expect it to find the local wheel in ../packages/. - uv_snapshot!(context.filters(), context.sync().current_dir(&subdir).arg("--offline"), @r###" + uv_snapshot!(context.filters(), context.sync().current_dir(&subdir).arg("--offline"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8039,7 +8450,7 @@ fn find_links_relative_in_config_works_from_subdir() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + ok==1.0.0 - "###); + "); Ok(()) } @@ -8060,23 +8471,23 @@ fn sync_dry_run() -> Result<()> { )?; // Perform a `--dry-run`. 
- uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - Would create virtual environment at: .venv + Would create project environment at: .venv Resolved 2 packages in [TIME] Would create lockfile at: uv.lock Would download 1 package Would install 1 package + iniconfig==2.0.0 - "###); + "); // Perform a full sync. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -8088,7 +8499,7 @@ fn sync_dry_run() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Update the requirements. pyproject_toml.write_str( @@ -8101,13 +8512,13 @@ fn sync_dry_run() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv + Would use project environment at: .venv Resolved 2 packages in [TIME] Would update lockfile at: uv.lock Would download 1 package @@ -8115,7 +8526,7 @@ fn sync_dry_run() -> Result<()> { Would install 1 package - iniconfig==2.0.0 + typing-extensions==4.10.0 - "###); + "); // Update the `requires-python`. 
pyproject_toml.write_str( @@ -8135,7 +8546,7 @@ fn sync_dry_run() -> Result<()> { ----- stderr ----- Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] - Would replace existing virtual environment at: .venv + Would replace project environment at: .venv warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.12'` vs `python_full_version == '3.9.*'` Resolved 2 packages in [TIME] Would update lockfile at: uv.lock @@ -8175,7 +8586,7 @@ fn sync_dry_run() -> Result<()> { ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv + Would use project environment at: .venv Resolved 2 packages in [TIME] Found up-to-date lockfile at: uv.lock Audited 1 package in [TIME] @@ -8223,7 +8634,7 @@ fn sync_dry_run_and_locked() -> Result<()> { ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv + Would use project environment at: .venv Resolved 2 packages in [TIME] Would download 1 package Would install 1 package @@ -8275,8 +8686,7 @@ fn sync_dry_run_and_frozen() -> Result<()> { ----- stdout ----- ----- stderr ----- - Discovered existing environment at: .venv - Found up-to-date lockfile at: uv.lock + Would use project environment at: .venv Would download 3 packages Would install 3 packages + anyio==3.7.0 @@ -8371,7 +8781,7 @@ fn sync_script() -> Result<()> { "# })?; - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8381,7 +8791,7 @@ fn sync_script() -> Result<()> { Resolved 3 packages in [TIME] Uninstalled 1 package in [TIME] - iniconfig==2.0.0 - "###); + "); // Modify the `requires-python`. script.write_str(indoc! 
{ r#" @@ -8396,13 +8806,13 @@ fn sync_script() -> Result<()> { "# })?; - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Recreating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] + Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] Resolved 5 packages in [TIME] Prepared 2 packages in [TIME] Installed 5 packages in [TIME] @@ -8411,7 +8821,7 @@ fn sync_script() -> Result<()> { + idna==3.6 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); // `--locked` and `--frozen` should fail with helpful error messages. uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r" @@ -8662,7 +9072,7 @@ fn sync_locked_script() -> Result<()> { ----- stdout ----- ----- stderr ----- - Recreating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] + Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'` Resolved 6 packages in [TIME] error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. 
@@ -8722,7 +9132,7 @@ fn sync_script_with_compatible_build_constraints() -> Result<()> { )]) .collect::>(); - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8738,7 +9148,7 @@ fn sync_script_with_compatible_build_constraints() -> Result<()> { + requests==1.2.0 + sniffio==1.3.1 + typing-extensions==4.10.0 - "###); + "); Ok(()) } @@ -8774,7 +9184,7 @@ fn sync_script_with_incompatible_build_constraints() -> Result<()> { "# })?; - uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r###" + uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" success: false exit_code: 1 ----- stdout ----- @@ -8785,7 +9195,7 @@ fn sync_script_with_incompatible_build_constraints() -> Result<()> { ├─▶ Failed to resolve requirements from `setup.py` build ├─▶ No solution found when resolving: `setuptools>=40.8.0` ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable. 
- "###); + "); Ok(()) } @@ -8808,7 +9218,7 @@ fn unsupported_git_scheme() -> Result<()> { "#}, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 1 ----- stdout ----- @@ -8819,7 +9229,7 @@ fn unsupported_git_scheme() -> Result<()> { × Failed to build `foo @ file://[TEMP_DIR]/` ├─▶ Failed to parse entry: `foo` ╰─▶ Unsupported Git URL scheme `c:` in `c:/home/ferris/projects/foo` (expected one of `https:`, `ssh:`, or `file:`) - "###); + "); Ok(()) } @@ -8858,7 +9268,7 @@ fn multiple_group_conflicts() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -8866,9 +9276,9 @@ fn multiple_group_conflicts() -> Result<()> { ----- stderr ----- Resolved 3 packages in [TIME] Audited in [TIME] - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("baz"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8878,9 +9288,9 @@ fn multiple_group_conflicts() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("baz"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8888,9 +9298,9 @@ fn multiple_group_conflicts() -> Result<()> { ----- stderr ----- Resolved 3 packages in [TIME] Audited 1 package in [TIME] - "###); + "); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar").arg("--group").arg("baz"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar").arg("--group").arg("baz"), @r" success: true exit_code: 0 ----- stdout ----- @@ -8902,7 +9312,7 @@ 
fn multiple_group_conflicts() -> Result<()> { Installed 1 package in [TIME] - iniconfig==2.0.0 + iniconfig==1.1.1 - "###); + "); uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo").arg("--group").arg("bar"), @r" success: false @@ -9316,7 +9726,7 @@ fn prune_cache_url_subdirectory() -> Result<()> { context.prune().arg("--ci").assert().success(); // Install the project. - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -9329,7 +9739,7 @@ fn prune_cache_url_subdirectory() -> Result<()> { + idna==3.6 + root==0.0.1 (from https://github.com/user-attachments/files/18216295/subdirectory-test.tar.gz#subdirectory=packages/root) + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -9745,7 +10155,7 @@ fn sync_upload_time() -> Result<()> { "#)?; // Install from the lockfile. - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- @@ -9756,17 +10166,17 @@ fn sync_upload_time() -> Result<()> { + anyio==3.7.0 + idna==3.6 + sniffio==1.3.1 - "###); + "); // Re-install from the lockfile. 
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Audited 3 packages in [TIME] - "###); + "); Ok(()) } @@ -9994,7 +10404,7 @@ fn read_only() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -10004,7 +10414,7 @@ fn read_only() -> Result<()> { Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); assert!(context.temp_dir.child("uv.lock").exists()); diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 0364703c2..13df63c19 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -1114,7 +1114,12 @@ uv sync [OPTIONS]
    --only-group only-group

    Only include dependencies from the specified dependency group.

    The project and its dependencies will be omitted.

    May be provided multiple times. Implies --no-default-groups.

    -
    --package package

    Sync for a specific package in the workspace.

    +
    --output-format output-format

    Select the output format

    +

    [default: text]

    Possible values:

    +
      +
    • text: Display the result in a human-readable format
    • +
    • json: Display the result in JSON format
    • +
    --package package

    Sync for a specific package in the workspace.

    The workspace's environment (.venv) is updated to reflect the subset of dependencies declared by the specified workspace member package.

    If the workspace member does not exist, uv will exit with an error.

    --prerelease prerelease

    The strategy to use when considering pre-release versions.

    From 4d82e8886340821b435f36e6eccccf95654ca23a Mon Sep 17 00:00:00 2001 From: Ivan Smirnov Date: Mon, 14 Jul 2025 16:35:34 +0100 Subject: [PATCH 028/130] Follow links when cache-key is a glob (#13438) ## Summary There's some inconsistent behaviour in handling symlinks when `cache-key` is a glob or a file path. This PR attempts to address that. - When cache-key is a path, [`Path::metadata()`](https://doc.rust-lang.org/std/path/struct.Path.html#method.metadata) is used to check if it's a file or not. According to the docs: > This function will traverse symbolic links to query information about the destination file. So, if the target file is a symlink, it will be resolved and the metadata will be queried for the underlying file. - When cache-key is a glob, `globwalk` is used, specifically allowing for symlinks: ```rust .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK) ``` - However, without enabling link following, `DirEntry::metadata()` will return an equivalent of `Path::symlink_metadata()` (and not `Path::metadata()`), which will have a file type that looks like ```rust FileType { is_file: false, is_dir: false, is_symlink: true, .. } ``` - Then, there's a check for `metadata.is_file()` which fails and complains that the target entry "is a directory when file was expected". - TLDR: glob cache-keys don't work with symlinks. ## Solutions Option 1 (current PR): follow symlinks. Option 2 (also doable): don't follow symlinks, but resolve the resulting target entry manually in case its file type is a symlink. However, this would be a little weird and unobvious in that we resolve files but not directories for some reason. Also, symlinking directories is pretty useful if you want to symlink directories of local dependencies that are not under the project's path. 
## Test Plan This has been tested manually: ```rust fn main() { for follow_links in [false, true] { let walker = globwalk::GlobWalkerBuilder::from_patterns(".", &["a/*"]) .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK) .follow_links(follow_links) .build() .unwrap(); let entry = walker.into_iter().next().unwrap().unwrap(); dbg!(&entry); dbg!(entry.file_type()); dbg!(entry.path_is_symlink()); dbg!(entry.path()); let meta = entry.metadata().unwrap(); dbg!(meta.is_file()); } let path = std::path::PathBuf::from("./a/b"); dbg!(path.metadata().unwrap().file_type()); dbg!(path.symlink_metadata().unwrap().file_type()); } ``` Current behaviour (glob cache-key, don't follow links): ``` [src/main.rs:9:9] &entry = DirEntry("./a/b") [src/main.rs:10:9] entry.file_type() = FileType { is_file: false, is_dir: false, is_symlink: true, .. } [src/main.rs:11:9] entry.path_is_symlink() = true [src/main.rs:12:9] entry.path() = "./a/b" [src/main.rs:14:9] meta.is_file() = false ``` Glob cache-key, follow links: ``` [src/main.rs:9:9] &entry = DirEntry("./a/b") [src/main.rs:10:9] entry.file_type() = FileType { is_file: true, is_dir: false, is_symlink: false, .. } [src/main.rs:11:9] entry.path_is_symlink() = true [src/main.rs:12:9] entry.path() = "./a/b" [src/main.rs:14:9] meta.is_file() = true ``` Using `path.metadata()` for a non-glob cache key: ``` [src/main.rs:18:5] path.metadata().unwrap().file_type() = FileType { is_file: true, is_dir: false, is_symlink: false, .. } [src/main.rs:19:5] path.symlink_metadata().unwrap().file_type() = FileType { is_file: false, is_dir: false, is_symlink: true, .. 
} ``` --- Cargo.lock | 2 + crates/uv-cache-info/Cargo.toml | 4 + crates/uv-cache-info/src/cache_info.rs | 100 ++++++++++++++++++++++--- 3 files changed, 97 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f2bebefc9..0069cbb65 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4907,10 +4907,12 @@ dependencies = [ name = "uv-cache-info" version = "0.0.1" dependencies = [ + "anyhow", "fs-err 3.1.1", "globwalk", "schemars", "serde", + "tempfile", "thiserror 2.0.12", "toml", "tracing", diff --git a/crates/uv-cache-info/Cargo.toml b/crates/uv-cache-info/Cargo.toml index 6b10bbebe..83df384be 100644 --- a/crates/uv-cache-info/Cargo.toml +++ b/crates/uv-cache-info/Cargo.toml @@ -24,3 +24,7 @@ thiserror = { workspace = true } toml = { workspace = true } tracing = { workspace = true } walkdir = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +tempfile = { workspace = true } diff --git a/crates/uv-cache-info/src/cache_info.rs b/crates/uv-cache-info/src/cache_info.rs index 27a98ab54..d2f836c84 100644 --- a/crates/uv-cache-info/src/cache_info.rs +++ b/crates/uv-cache-info/src/cache_info.rs @@ -230,18 +230,32 @@ impl CacheInfo { continue; } }; - let metadata = match entry.metadata() { - Ok(metadata) => metadata, - Err(err) => { - warn!("Failed to read metadata for glob entry: {err}"); - continue; + let metadata = if entry.path_is_symlink() { + // resolve symlinks for leaf entries without following symlinks while globbing + match fs_err::metadata(entry.path()) { + Ok(metadata) => metadata, + Err(err) => { + warn!("Failed to resolve symlink for glob entry: {err}"); + continue; + } + } + } else { + match entry.metadata() { + Ok(metadata) => metadata, + Err(err) => { + warn!("Failed to read metadata for glob entry: {err}"); + continue; + } } }; if !metadata.is_file() { - warn!( - "Expected file for cache key, but found directory: `{}`", - entry.path().display() - ); + if !entry.path_is_symlink() { + // don't warn if it was a symlink - it 
may legitimately resolve to a directory + warn!( + "Expected file for cache key, but found directory: `{}`", + entry.path().display() + ); + } continue; } timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata))); @@ -346,3 +360,71 @@ enum DirectoryTimestamp { Timestamp(Timestamp), Inode(u64), } + +#[cfg(all(test, unix))] +mod tests_unix { + use anyhow::Result; + + use super::{CacheInfo, Timestamp}; + + #[test] + fn test_cache_info_symlink_resolve() -> Result<()> { + let dir = tempfile::tempdir()?; + let dir = dir.path().join("dir"); + fs_err::create_dir_all(&dir)?; + + let write_manifest = |cache_key: &str| { + fs_err::write( + dir.join("pyproject.toml"), + format!( + r#" + [tool.uv] + cache-keys = [ + "{cache_key}" + ] + "# + ), + ) + }; + + let touch = |path: &str| -> Result<_> { + let path = dir.join(path); + fs_err::create_dir_all(path.parent().unwrap())?; + fs_err::write(&path, "")?; + Ok(Timestamp::from_metadata(&path.metadata()?)) + }; + + let cache_timestamp = || -> Result<_> { Ok(CacheInfo::from_directory(&dir)?.timestamp) }; + + write_manifest("x/**")?; + assert_eq!(cache_timestamp()?, None); + let y = touch("x/y")?; + assert_eq!(cache_timestamp()?, Some(y)); + let z = touch("x/z")?; + assert_eq!(cache_timestamp()?, Some(z)); + + // leaf entry symlink should be resolved + let a = touch("../a")?; + fs_err::os::unix::fs::symlink(dir.join("../a"), dir.join("x/a"))?; + assert_eq!(cache_timestamp()?, Some(a)); + + // symlink directories should not be followed while globbing + let c = touch("../b/c")?; + fs_err::os::unix::fs::symlink(dir.join("../b"), dir.join("x/b"))?; + assert_eq!(cache_timestamp()?, Some(a)); + + // no globs, should work as expected + write_manifest("x/y")?; + assert_eq!(cache_timestamp()?, Some(y)); + write_manifest("x/a")?; + assert_eq!(cache_timestamp()?, Some(a)); + write_manifest("x/b/c")?; + assert_eq!(cache_timestamp()?, Some(c)); + + // symlink pointing to a directory + write_manifest("x/*b*")?; + 
assert_eq!(cache_timestamp()?, None); + + Ok(()) + } +} From 77c771c7f33df1040c854aeea462c18b74d39987 Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Mon, 14 Jul 2025 14:01:28 -0400 Subject: [PATCH 029/130] Bump version to 0.7.21 (#14611) --- CHANGELOG.md | 35 +++++++++++++++++++++++++++ Cargo.lock | 6 ++--- crates/uv-build/Cargo.toml | 2 +- crates/uv-build/pyproject.toml | 2 +- crates/uv-version/Cargo.toml | 2 +- crates/uv/Cargo.toml | 2 +- docs/concepts/build-backend.md | 2 +- docs/getting-started/installation.md | 4 +-- docs/guides/integration/aws-lambda.md | 4 +-- docs/guides/integration/docker.md | 10 ++++---- docs/guides/integration/github.md | 2 +- docs/guides/integration/pre-commit.md | 10 ++++---- pyproject.toml | 2 +- 13 files changed, 59 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fa93fb03..0c7d62c75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,41 @@ +## 0.7.21 + +### Python + +- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712) +for more details. 
+ +### Enhancements + +- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320)) +- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578)) +- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530)) +- Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582)) +- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522)) +- Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544)) + +### Preview features + +- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689)) + +### Bug fixes + +- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606)) +- Fix handling of globs in `cache-key`: follow symlinks ([#13438](https://github.com/astral-sh/uv/pull/13438)) and `..` and improve performance ([#13469](https://github.com/astral-sh/uv/pull/13469)) + +### Documentation + +- Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539)) +- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554)) +- Update CONTRIBUTING.md with instructions to format markdown files ([#14246](https://github.com/astral-sh/uv/pull/14246)) +- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533)) + ## 0.7.20 ### Python diff --git a/Cargo.lock b/Cargo.lock index 0069cbb65..c43f4872d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4633,7 +4633,7 @@ dependencies = [ [[package]] name = "uv" -version = "0.7.20" +version = "0.7.21" dependencies = [ "anstream", "anyhow", @@ -4798,7 +4798,7 @@ dependencies = [ [[package]] name = "uv-build" -version = "0.7.20" +version = "0.7.21" dependencies = [ "anyhow", 
"uv-build-backend", @@ -5991,7 +5991,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.7.20" +version = "0.7.21" [[package]] name = "uv-virtualenv" diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml index ffbea0ea9..f943010ae 100644 --- a/crates/uv-build/Cargo.toml +++ b/crates/uv-build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-build" -version = "0.7.20" +version = "0.7.21" edition.workspace = true rust-version.workspace = true homepage.workspace = true diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml index 13c21edd8..5a2209155 100644 --- a/crates/uv-build/pyproject.toml +++ b/crates/uv-build/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uv-build" -version = "0.7.20" +version = "0.7.21" description = "The uv build backend" authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index f1b47dd1d..a9fe788a5 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.7.20" +version = "0.7.21" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index 904cc8fc3..fe2f2200c 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.7.20" +version = "0.7.21" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md index a34bc7658..69694f317 100644 --- a/docs/concepts/build-backend.md +++ b/docs/concepts/build-backend.md @@ -36,7 +36,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the ```toml title="pyproject.toml" [build-system] -requires = ["uv_build>=0.7.20,<0.8.0"] +requires = ["uv_build>=0.7.21,<0.8.0"] 
build-backend = "uv_build" ``` diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index 12895b56e..fa68d210a 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```console - $ curl -LsSf https://astral.sh/uv/0.7.20/install.sh | sh + $ curl -LsSf https://astral.sh/uv/0.7.21/install.sh | sh ``` === "Windows" @@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```pwsh-session - PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.20/install.ps1 | iex" + PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.21/install.ps1 | iex" ``` !!! tip diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md index 1e6c7c47a..4cdb75b7a 100644 --- a/docs/guides/integration/aws-lambda.md +++ b/docs/guides/integration/aws-lambda.md @@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th other unnecessary files. ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.20 AS uv +FROM ghcr.io/astral-sh/uv:0.7.21 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder @@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell Finally, we'll update the Dockerfile to include the local library in the deployment package: ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.20 AS uv +FROM ghcr.io/astral-sh/uv:0.7.21 AS uv # First, bundle the dependencies into the task root. 
FROM public.ecr.aws/lambda/python:3.13 AS builder diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index 0445b155c..bbea9b264 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help The following distroless images are available: - `ghcr.io/astral-sh/uv:latest` -- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.20` +- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.21` - `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch version) @@ -75,7 +75,7 @@ And the following derived images are available: As with the distroless image, each derived image is published with uv version tags as `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and -`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.20-alpine`. +`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.21-alpine`. For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv) page. @@ -113,7 +113,7 @@ Note this requires `curl` to be available. In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.7.20 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/ ``` !!! tip @@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.20 /uv /uvx /bin/ Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.7.20/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.7.21/install.sh /uv-installer.sh ``` ### Installing a project @@ -557,5 +557,5 @@ Verified OK !!! 
tip These examples use `latest`, but best practice is to verify the attestation for a specific - version tag, e.g., `ghcr.io/astral-sh/uv:0.7.20`, or (even better) the specific image digest, + version tag, e.g., `ghcr.io/astral-sh/uv:0.7.21`, or (even better) the specific image digest, such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`. diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index e7fea7b29..d206febd1 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -47,7 +47,7 @@ jobs: uses: astral-sh/setup-uv@v6 with: # Install a specific version of uv. - version: "0.7.20" + version: "0.7.21" ``` ## Setting up Python diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index 0495581c2..912ff0213 100644 --- a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.20 + rev: 0.7.21 hooks: - id: uv-lock ``` @@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.20 + rev: 0.7.21 hooks: - id: uv-export ``` @@ -41,7 +41,7 @@ To compile requirements files: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.20 + rev: 0.7.21 hooks: # Compile requirements - id: pip-compile @@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. 
- rev: 0.7.20 + rev: 0.7.21 hooks: # Compile requirements - id: pip-compile @@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.20 + rev: 0.7.21 hooks: # Compile requirements - id: pip-compile diff --git a/pyproject.toml b/pyproject.toml index df118d720..f3c9c4f64 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "uv" -version = "0.7.20" +version = "0.7.21" description = "An extremely fast Python package and project manager, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" From b046e7f3be3b495f695a474ce0088f9b2280eb5c Mon Sep 17 00:00:00 2001 From: InSync Date: Tue, 15 Jul 2025 02:06:05 +0700 Subject: [PATCH 030/130] Add missing comma in `projects/dependencies.md` (#14613) ## Summary Diff: ```diff [dependency-groups] dev = [ - {include-group = "lint"} + {include-group = "lint"}, {include-group = "test"} ] ``` ## Test Plan None. 
--- docs/concepts/projects/dependencies.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md index e5c64a3ee..2eabbf4dc 100644 --- a/docs/concepts/projects/dependencies.md +++ b/docs/concepts/projects/dependencies.md @@ -686,7 +686,7 @@ A dependency group can include other dependency groups, e.g.: ```toml title="pyproject.toml" [dependency-groups] dev = [ - {include-group = "lint"} + {include-group = "lint"}, {include-group = "test"} ] lint = [ From 9871bbdc7931c5ca664a798fec7df2da9b71e1ba Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Mon, 14 Jul 2025 16:29:02 -0400 Subject: [PATCH 031/130] Fix 0.7.21 changelog (#14615) --- CHANGELOG.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c7d62c75..38be00d2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,9 @@ for more details. - Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582)) - Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522)) - Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544)) +- Follow leaf symlinks matched by globs in `cache-key` ([#13438](https://github.com/astral-sh/uv/pull/13438)) +- Support parent path components (`..`) in globs in `cache-key` ([#13469](https://github.com/astral-sh/uv/pull/13469)) +- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469)) ### Preview features @@ -29,13 +32,12 @@ for more details. 
### Bug fixes - Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606)) -- Fix handling of globs in `cache-key`: follow symlinks ([#13438](https://github.com/astral-sh/uv/pull/13438)) and `..` and improve performance ([#13469](https://github.com/astral-sh/uv/pull/13469)) ### Documentation - Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539)) - Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554)) -- Update CONTRIBUTING.md with instructions to format markdown files ([#14246](https://github.com/astral-sh/uv/pull/14246)) +- Update CONTRIBUTING.md with instructions to format Markdown files via Docker ([#14246](https://github.com/astral-sh/uv/pull/14246)) - Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533)) ## 0.7.20 From 405ef66cef6cb67817d039277f05c924a5cff19e Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Tue, 15 Jul 2025 10:00:04 -0400 Subject: [PATCH 032/130] Allow users to override index `cache-control` headers (#14620) ## Summary You can now override the cache control headers for the Simple API, file downloads, or both: ```toml [[tool.uv.index]] name = "example" url = "https://example.com/simple" cache-control = { api = "max-age=600", files = "max-age=365000000, immutable" } ``` Closes https://github.com/astral-sh/uv/issues/10444. 
--- crates/uv-client/src/cached_client.rs | 25 ++++-- crates/uv-client/src/error.rs | 3 + crates/uv-client/src/registry_client.rs | 58 +++++++++---- crates/uv-distribution-types/src/index.rs | 83 +++++++++++++++++++ crates/uv-distribution-types/src/index_url.rs | 80 ++++++++++++++++++ crates/uv/tests/it/show_settings.rs | 29 +++++++ docs/concepts/indexes.md | 37 +++++++++ uv.schema.json | 32 +++++++ 8 files changed, 323 insertions(+), 24 deletions(-) diff --git a/crates/uv-client/src/cached_client.rs b/crates/uv-client/src/cached_client.rs index ee3314d1c..f888ea5f1 100644 --- a/crates/uv-client/src/cached_client.rs +++ b/crates/uv-client/src/cached_client.rs @@ -196,16 +196,18 @@ impl + std::error::Error + 'static> From> for } #[derive(Debug, Clone, Copy)] -pub enum CacheControl { +pub enum CacheControl<'a> { /// Respect the `cache-control` header from the response. None, /// Apply `max-age=0, must-revalidate` to the request. MustRevalidate, /// Allow the client to return stale responses. AllowStale, + /// Override the cache control header with a custom value. + Override(&'a str), } -impl From for CacheControl { +impl From for CacheControl<'_> { fn from(value: Freshness) -> Self { match value { Freshness::Fresh => Self::None, @@ -259,7 +261,7 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, - cache_control: CacheControl, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let payload = self @@ -292,7 +294,7 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, - cache_control: CacheControl, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let fresh_req = req.try_clone().expect("HTTP request must be cloneable"); @@ -469,7 +471,7 @@ impl CachedClient { async fn send_cached( &self, mut req: Request, - cache_control: CacheControl, + cache_control: CacheControl<'_>, cached: DataWithCachePolicy, ) -> Result { // Apply the cache control header, if necessary. 
@@ -481,6 +483,13 @@ impl CachedClient { http::HeaderValue::from_static("no-cache"), ); } + CacheControl::Override(value) => { + req.headers_mut().insert( + http::header::CACHE_CONTROL, + http::HeaderValue::from_str(value) + .map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?, + ); + } } Ok(match cached.cache_policy.before_request(&mut req) { BeforeRequest::Fresh => { @@ -488,7 +497,7 @@ impl CachedClient { CachedResponse::FreshCache(cached) } BeforeRequest::Stale(new_cache_policy_builder) => match cache_control { - CacheControl::None | CacheControl::MustRevalidate => { + CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => { debug!("Found stale response for: {}", req.url()); self.send_cached_handle_stale(req, cached, new_cache_policy_builder) .await? @@ -599,7 +608,7 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, - cache_control: CacheControl, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let payload = self @@ -623,7 +632,7 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, - cache_control: CacheControl, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let mut past_retries = 0; diff --git a/crates/uv-client/src/error.rs b/crates/uv-client/src/error.rs index 754237fe2..035cdea71 100644 --- a/crates/uv-client/src/error.rs +++ b/crates/uv-client/src/error.rs @@ -259,6 +259,9 @@ pub enum ErrorKind { "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`" )] Offline(String), + + #[error("Invalid cache control header: `{0}`")] + InvalidCacheControl(String), } impl ErrorKind { diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs index afa1b03ae..1d12c5adf 100644 --- a/crates/uv-client/src/registry_client.rs +++ b/crates/uv-client/src/registry_client.rs @@ -511,11 +511,17 @@ impl RegistryClient { format!("{package_name}.rkyv"), ); let cache_control 
= match self.connectivity { - Connectivity::Online => CacheControl::from( - self.cache - .freshness(&cache_entry, Some(package_name), None) - .map_err(ErrorKind::Io)?, - ), + Connectivity::Online => { + if let Some(header) = self.index_urls.simple_api_cache_control_for(index) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.cache + .freshness(&cache_entry, Some(package_name), None) + .map_err(ErrorKind::Io)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; @@ -571,7 +577,7 @@ impl RegistryClient { package_name: &PackageName, url: &DisplaySafeUrl, cache_entry: &CacheEntry, - cache_control: CacheControl, + cache_control: CacheControl<'_>, ) -> Result, Error> { let simple_request = self .uncached_client(url) @@ -783,11 +789,17 @@ impl RegistryClient { format!("{}.msgpack", filename.cache_key()), ); let cache_control = match self.connectivity { - Connectivity::Online => CacheControl::from( - self.cache - .freshness(&cache_entry, Some(&filename.name), None) - .map_err(ErrorKind::Io)?, - ), + Connectivity::Online => { + if let Some(header) = self.index_urls.artifact_cache_control_for(index) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.cache + .freshness(&cache_entry, Some(&filename.name), None) + .map_err(ErrorKind::Io)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; @@ -853,11 +865,25 @@ impl RegistryClient { format!("{}.msgpack", filename.cache_key()), ); let cache_control = match self.connectivity { - Connectivity::Online => CacheControl::from( - self.cache - .freshness(&cache_entry, Some(&filename.name), None) - .map_err(ErrorKind::Io)?, - ), + Connectivity::Online => { + if let Some(index) = index { + if let Some(header) = self.index_urls.artifact_cache_control_for(index) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.cache + .freshness(&cache_entry, Some(&filename.name), None) + .map_err(ErrorKind::Io)?, + ) + } + } else { + CacheControl::from( 
+ self.cache + .freshness(&cache_entry, Some(&filename.name), None) + .map_err(ErrorKind::Io)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; diff --git a/crates/uv-distribution-types/src/index.rs b/crates/uv-distribution-types/src/index.rs index 8ac7c3cd4..04614a18e 100644 --- a/crates/uv-distribution-types/src/index.rs +++ b/crates/uv-distribution-types/src/index.rs @@ -6,11 +6,23 @@ use thiserror::Error; use uv_auth::{AuthPolicy, Credentials}; use uv_redacted::DisplaySafeUrl; +use uv_small_str::SmallString; use crate::index_name::{IndexName, IndexNameError}; use crate::origin::Origin; use crate::{IndexStatusCodeStrategy, IndexUrl, IndexUrlError, SerializableStatusCode}; +/// Cache control configuration for an index. +#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Default)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(rename_all = "kebab-case")] +pub struct IndexCacheControl { + /// Cache control header for Simple API requests. + pub api: Option, + /// Cache control header for file downloads. + pub files: Option, +} + #[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[serde(rename_all = "kebab-case")] @@ -104,6 +116,19 @@ pub struct Index { /// ``` #[serde(default)] pub ignore_error_codes: Option>, + /// Cache control configuration for this index. + /// + /// When set, these headers will override the server's cache control headers + /// for both package metadata requests and artifact downloads. 
+ /// + /// ```toml + /// [[tool.uv.index]] + /// name = "my-index" + /// url = "https:///simple" + /// cache-control = { api = "max-age=600", files = "max-age=3600" } + /// ``` + #[serde(default)] + pub cache_control: Option, } #[derive( @@ -142,6 +167,7 @@ impl Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, } } @@ -157,6 +183,7 @@ impl Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, } } @@ -172,6 +199,7 @@ impl Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, } } @@ -250,6 +278,7 @@ impl From for Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, } } } @@ -273,6 +302,7 @@ impl FromStr for Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, }); } } @@ -289,6 +319,7 @@ impl FromStr for Index { publish_url: None, authenticate: AuthPolicy::default(), ignore_error_codes: None, + cache_control: None, }) } } @@ -384,3 +415,55 @@ pub enum IndexSourceError { #[error("Index included a name, but the name was empty")] EmptyName, } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_index_cache_control_headers() { + // Test that cache control headers are properly parsed from TOML + let toml_str = r#" + name = "test-index" + url = "https://test.example.com/simple" + cache-control = { api = "max-age=600", files = "max-age=3600" } + "#; + + let index: Index = toml::from_str(toml_str).unwrap(); + assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index"); + assert!(index.cache_control.is_some()); + let cache_control = index.cache_control.as_ref().unwrap(); + assert_eq!(cache_control.api.as_deref(), Some("max-age=600")); + assert_eq!(cache_control.files.as_deref(), Some("max-age=3600")); + } + + #[test] + fn test_index_without_cache_control() { + // 
Test that indexes work without cache control headers + let toml_str = r#" + name = "test-index" + url = "https://test.example.com/simple" + "#; + + let index: Index = toml::from_str(toml_str).unwrap(); + assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index"); + assert_eq!(index.cache_control, None); + } + + #[test] + fn test_index_partial_cache_control() { + // Test that cache control can have just one field + let toml_str = r#" + name = "test-index" + url = "https://test.example.com/simple" + cache-control = { api = "max-age=300" } + "#; + + let index: Index = toml::from_str(toml_str).unwrap(); + assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index"); + assert!(index.cache_control.is_some()); + let cache_control = index.cache_control.as_ref().unwrap(); + assert_eq!(cache_control.api.as_deref(), Some("max-age=300")); + assert_eq!(cache_control.files, None); + } +} diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs index 1c8cd0a76..bd3e9abc2 100644 --- a/crates/uv-distribution-types/src/index_url.rs +++ b/crates/uv-distribution-types/src/index_url.rs @@ -599,6 +599,26 @@ impl<'a> IndexUrls { } IndexStatusCodeStrategy::Default } + + /// Return the Simple API cache control header for an [`IndexUrl`], if configured. + pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> { + for index in &self.indexes { + if index.url() == url { + return index.cache_control.as_ref()?.api.as_deref(); + } + } + None + } + + /// Return the artifact cache control header for an [`IndexUrl`], if configured. + pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> { + for index in &self.indexes { + if index.url() == url { + return index.cache_control.as_ref()?.files.as_deref(); + } + } + None + } } bitflags::bitflags! 
{ @@ -717,4 +737,64 @@ mod tests { "git+https://github.com/example/repo.git" )); } + + #[test] + fn test_cache_control_lookup() { + use std::str::FromStr; + + use uv_small_str::SmallString; + + use crate::IndexFormat; + use crate::index_name::IndexName; + + let indexes = vec![ + Index { + name: Some(IndexName::from_str("index1").unwrap()), + url: IndexUrl::from_str("https://index1.example.com/simple").unwrap(), + cache_control: Some(crate::IndexCacheControl { + api: Some(SmallString::from("max-age=300")), + files: Some(SmallString::from("max-age=1800")), + }), + explicit: false, + default: false, + origin: None, + format: IndexFormat::Simple, + publish_url: None, + authenticate: uv_auth::AuthPolicy::default(), + ignore_error_codes: None, + }, + Index { + name: Some(IndexName::from_str("index2").unwrap()), + url: IndexUrl::from_str("https://index2.example.com/simple").unwrap(), + cache_control: None, + explicit: false, + default: false, + origin: None, + format: IndexFormat::Simple, + publish_url: None, + authenticate: uv_auth::AuthPolicy::default(), + ignore_error_codes: None, + }, + ]; + + let index_urls = IndexUrls::from_indexes(indexes); + + let url1 = IndexUrl::from_str("https://index1.example.com/simple").unwrap(); + assert_eq!( + index_urls.simple_api_cache_control_for(&url1), + Some("max-age=300") + ); + assert_eq!( + index_urls.artifact_cache_control_for(&url1), + Some("max-age=1800") + ); + + let url2 = IndexUrl::from_str("https://index2.example.com/simple").unwrap(); + assert_eq!(index_urls.simple_api_cache_control_for(&url2), None); + assert_eq!(index_urls.artifact_cache_control_for(&url2), None); + + let url3 = IndexUrl::from_str("https://index3.example.com/simple").unwrap(); + assert_eq!(index_urls.simple_api_cache_control_for(&url3), None); + assert_eq!(index_urls.artifact_cache_control_for(&url3), None); + } } diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index 7635bd523..2637af8ac 100644 --- 
a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -139,6 +139,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -320,6 +321,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -502,6 +504,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -716,6 +719,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -1059,6 +1063,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -1267,6 +1272,7 @@ fn resolve_index_url() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -1299,6 +1305,7 @@ fn resolve_index_url() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -1484,6 +1491,7 @@ fn resolve_index_url() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -1516,6 +1524,7 @@ fn resolve_index_url() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -1548,6 +1557,7 @@ fn resolve_index_url() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -1755,6 +1765,7 @@ fn resolve_find_links() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: 
None, + cache_control: None, }, ], no_index: true, @@ -2124,6 +2135,7 @@ fn resolve_top_level() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -2156,6 +2168,7 @@ fn resolve_top_level() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -2337,6 +2350,7 @@ fn resolve_top_level() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -2369,6 +2383,7 @@ fn resolve_top_level() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -3564,6 +3579,7 @@ fn resolve_both() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -3870,6 +3886,7 @@ fn resolve_config_file() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -4658,6 +4675,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -4690,6 +4708,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -4873,6 +4892,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -4905,6 +4925,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -5094,6 +5115,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { 
name: None, @@ -5126,6 +5148,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -5310,6 +5333,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -5342,6 +5366,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -5533,6 +5558,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -5565,6 +5591,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -5749,6 +5776,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, Index { name: None, @@ -5781,6 +5809,7 @@ fn index_priority() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], diff --git a/docs/concepts/indexes.md b/docs/concepts/indexes.md index 6c03bae66..5e6c3866c 100644 --- a/docs/concepts/indexes.md +++ b/docs/concepts/indexes.md @@ -244,6 +244,43 @@ authenticate = "never" When `authenticate` is set to `never`, uv will never search for credentials for the given index and will error if credentials are provided directly. +### Customizing cache control headers + +By default, uv will respect the cache control headers provided by the index. For example, PyPI +serves package metadata with a `max-age=600` header, thereby allowing uv to cache package metadata +for 10 minutes; and wheels and source distributions with a `max-age=365000000, immutable` header, +thereby allowing uv to cache artifacts indefinitely. 
+ +To override the cache control headers for an index, use the `cache-control` setting: + +```toml +[[tool.uv.index]] +name = "example" +url = "https://example.com/simple" +cache-control = { api = "max-age=600", files = "max-age=365000000, immutable" } +``` + +The `cache-control` setting accepts an object with two optional keys: + +- `api`: Controls caching for Simple API requests (package metadata). +- `files`: Controls caching for artifact downloads (wheels and source distributions). + +The values for these keys are strings that follow the +[HTTP Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control) +syntax. For example, to force uv to always revalidate package metadata, set `api = "no-cache"`: + +```toml +[[tool.uv.index]] +name = "example" +url = "https://example.com/simple" +cache-control = { api = "no-cache" } +``` + +This setting is most commonly used to override the default cache control headers for private indexes +that otherwise disable caching, often unintentionally. We typically recommend following PyPI's +approach to caching headers, i.e., setting `api = "max-age=600"` and +`files = "max-age=365000000, immutable"`. 
+ ## "Flat" indexes By default, `[[tool.uv.index]]` entries are assumed to be PyPI-style registries that implement the diff --git a/uv.schema.json b/uv.schema.json index 4190672e9..e418f37f0 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -907,6 +907,18 @@ ], "default": "auto" }, + "cache-control": { + "description": "Cache control configuration for this index.\n\nWhen set, these headers will override the server's cache control headers\nfor both package metadata requests and artifact downloads.\n\n```toml\n[[tool.uv.index]]\nname = \"my-index\"\nurl = \"https:///simple\"\ncache-control = { api = \"max-age=600\", files = \"max-age=3600\" }\n```", + "anyOf": [ + { + "$ref": "#/definitions/IndexCacheControl" + }, + { + "type": "null" + } + ], + "default": null + }, "default": { "description": "Mark the index as the default index.\n\nBy default, uv uses PyPI as the default index, such that even if additional indexes are\ndefined via `[[tool.uv.index]]`, PyPI will still be used as a fallback for packages that\naren't found elsewhere. 
To disable the PyPI default, set `default = true` on at least one\nother index.\n\nMarking an index as default will move it to the front of the list of indexes, such that it\nis given the highest priority when resolving packages.", "type": "boolean", @@ -972,6 +984,26 @@ "url" ] }, + "IndexCacheControl": { + "description": "Cache control configuration for an index.", + "type": "object", + "properties": { + "api": { + "description": "Cache control header for Simple API requests.", + "type": [ + "string", + "null" + ] + }, + "files": { + "description": "Cache control header for file downloads.", + "type": [ + "string", + "null" + ] + } + } + }, "IndexFormat": { "oneOf": [ { From cd0d5d4748af11007e05718005437c0780d09048 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alex=20Preng=C3=A8re?= <2138730+alexprengere@users.noreply.github.com> Date: Tue, 15 Jul 2025 19:03:01 +0200 Subject: [PATCH 033/130] Fix --all-arches when paired with --only-downloads (#14629) ## Summary On current main, and on the latest released version 0.7.21, I have: ``` $ uv python list --only-downloads --all-arches cpython-3.14.0b4-linux-x86_64-gnu cpython-3.14.0b4+freethreaded-linux-x86_64-gnu cpython-3.13.5-linux-x86_64-gnu cpython-3.13.5+freethreaded-linux-x86_64-gnu cpython-3.12.11-linux-x86_64-gnu cpython-3.11.13-linux-x86_64-gnu cpython-3.10.18-linux-x86_64-gnu cpython-3.9.23-linux-x86_64-gnu cpython-3.8.20-linux-x86_64-gnu pypy-3.11.13-linux-x86_64-gnu pypy-3.10.16-linux-x86_64-gnu pypy-3.9.19-linux-x86_64-gnu pypy-3.8.16-linux-x86_64-gnu graalpy-3.11.0-linux-x86_64-gnu graalpy-3.10.0-linux-x86_64-gnu graalpy-3.8.5-linux-x86_64-gnu ``` As you can see, `--all-arches` is not respected here. 
## Test Plan With the patch: ``` $ cargo run python list --only-downloads --all-arches cpython-3.14.0b4-linux-x86_64-gnu cpython-3.14.0b4+freethreaded-linux-x86_64-gnu cpython-3.14.0b4-linux-x86_64_v2-gnu cpython-3.14.0b4+freethreaded-linux-x86_64_v2-gnu cpython-3.14.0b4-linux-x86_64_v3-gnu cpython-3.14.0b4+freethreaded-linux-x86_64_v3-gnu cpython-3.14.0b4-linux-x86_64_v4-gnu cpython-3.14.0b4+freethreaded-linux-x86_64_v4-gnu cpython-3.14.0b4-linux-aarch64-gnu cpython-3.14.0b4+freethreaded-linux-aarch64-gnu cpython-3.14.0b4-linux-powerpc64le-gnu cpython-3.14.0b4+freethreaded-linux-powerpc64le-gnu cpython-3.14.0b4-linux-riscv64gc-gnu cpython-3.14.0b4+freethreaded-linux-riscv64gc-gnu cpython-3.14.0b4-linux-s390x-gnu cpython-3.14.0b4+freethreaded-linux-s390x-gnu cpython-3.13.5-linux-x86_64-gnu cpython-3.13.5+freethreaded-linux-x86_64-gnu cpython-3.13.5-linux-x86_64_v2-gnu cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu cpython-3.13.5-linux-x86_64_v3-gnu cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu cpython-3.13.5-linux-x86_64_v4-gnu cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu cpython-3.13.5-linux-aarch64-gnu cpython-3.13.5+freethreaded-linux-aarch64-gnu cpython-3.13.5-linux-powerpc64le-gnu cpython-3.13.5+freethreaded-linux-powerpc64le-gnu cpython-3.13.5-linux-riscv64gc-gnu cpython-3.13.5+freethreaded-linux-riscv64gc-gnu cpython-3.13.5-linux-s390x-gnu cpython-3.13.5+freethreaded-linux-s390x-gnu cpython-3.12.11-linux-x86_64-gnu cpython-3.12.11-linux-x86_64_v2-gnu cpython-3.12.11-linux-x86_64_v3-gnu cpython-3.12.11-linux-x86_64_v4-gnu cpython-3.12.11-linux-aarch64-gnu cpython-3.12.11-linux-powerpc64le-gnu cpython-3.12.11-linux-riscv64gc-gnu cpython-3.12.11-linux-s390x-gnu cpython-3.11.13-linux-x86_64-gnu cpython-3.11.13-linux-x86_64_v2-gnu cpython-3.11.13-linux-x86_64_v3-gnu cpython-3.11.13-linux-x86_64_v4-gnu cpython-3.11.13-linux-aarch64-gnu cpython-3.11.13-linux-powerpc64le-gnu cpython-3.11.13-linux-riscv64gc-gnu cpython-3.11.13-linux-s390x-gnu 
cpython-3.11.5-linux-x86-gnu cpython-3.10.18-linux-x86_64-gnu cpython-3.10.18-linux-x86_64_v2-gnu cpython-3.10.18-linux-x86_64_v3-gnu cpython-3.10.18-linux-x86_64_v4-gnu cpython-3.10.18-linux-aarch64-gnu cpython-3.10.18-linux-powerpc64le-gnu cpython-3.10.18-linux-riscv64gc-gnu cpython-3.10.18-linux-s390x-gnu cpython-3.10.13-linux-x86-gnu cpython-3.9.23-linux-x86_64-gnu cpython-3.9.23-linux-x86_64_v2-gnu cpython-3.9.23-linux-x86_64_v3-gnu cpython-3.9.23-linux-x86_64_v4-gnu cpython-3.9.23-linux-aarch64-gnu cpython-3.9.23-linux-powerpc64le-gnu cpython-3.9.23-linux-riscv64gc-gnu cpython-3.9.23-linux-s390x-gnu cpython-3.9.18-linux-x86-gnu cpython-3.8.20-linux-x86_64-gnu cpython-3.8.20-linux-aarch64-gnu cpython-3.8.17-linux-x86-gnu pypy-3.11.13-linux-x86_64-gnu pypy-3.11.13-linux-aarch64-gnu pypy-3.11.13-linux-x86-gnu pypy-3.10.16-linux-x86_64-gnu pypy-3.10.16-linux-aarch64-gnu pypy-3.10.16-linux-x86-gnu pypy-3.10.14-linux-s390x-gnu pypy-3.9.19-linux-x86_64-gnu pypy-3.9.19-linux-aarch64-gnu pypy-3.9.19-linux-x86-gnu pypy-3.9.19-linux-s390x-gnu pypy-3.8.16-linux-x86_64-gnu pypy-3.8.16-linux-aarch64-gnu pypy-3.8.16-linux-x86-gnu pypy-3.8.16-linux-s390x-gnu graalpy-3.11.0-linux-x86_64-gnu graalpy-3.11.0-linux-aarch64-gnu graalpy-3.10.0-linux-x86_64-gnu graalpy-3.10.0-linux-aarch64-gnu graalpy-3.8.5-linux-x86_64-gnu graalpy-3.8.5-linux-aarch64-gnu ``` --- crates/uv/src/commands/python/list.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/uv/src/commands/python/list.rs b/crates/uv/src/commands/python/list.rs index 2cd54747c..17528a11e 100644 --- a/crates/uv/src/commands/python/list.rs +++ b/crates/uv/src/commands/python/list.rs @@ -81,6 +81,8 @@ pub(crate) async fn list( PythonListKinds::Installed => None, PythonListKinds::Downloads => Some(if all_platforms { base_download_request + } else if all_arches { + base_download_request.fill_platform()?.with_any_arch() } else { base_download_request.fill_platform()? 
}), From bb1e9a247c5e488a712e8f1cc040f025f9751337 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 12:12:36 -0500 Subject: [PATCH 034/130] Update preview installation of Python executables to be non-fatal (#14612) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, if installation of executables into the bin directory failed we'd exit with a non-zero code. However, if we make this behavior the default we don't want it to be fatal. There's a `--bin` opt-in to _require_ successful executable installation and a `--no-bin` opt-out to silence the warning / opt-out of installation entirely. Part of https://github.com/astral-sh/uv/issues/14296 — we need this before we can stabilize the behavior. In #14614 we do the same for writing entries to the Windows registry. --- crates/uv-cli/src/lib.rs | 15 ++- crates/uv-python/src/windows_registry.rs | 7 +- crates/uv/src/commands/python/install.rs | 145 +++++++++++++++++------ crates/uv/src/lib.rs | 2 + crates/uv/src/settings.rs | 7 ++ crates/uv/tests/it/help.rs | 5 + crates/uv/tests/it/python_install.rs | 68 ++++++++++- docs/reference/cli.md | 3 +- 8 files changed, 212 insertions(+), 40 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 0f3652341..70d5322d9 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -4941,6 +4941,19 @@ pub struct PythonInstallArgs { #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)] pub install_dir: Option, + /// Install a Python executable into the `bin` directory. + /// + /// This is the default behavior. If this flag is provided explicitly, uv will error if the + /// executable cannot be installed. + /// + /// See `UV_PYTHON_BIN_DIR` to customize the target directory. + #[arg(long, overrides_with("no_bin"), hide = true)] + pub bin: bool, + /// Do not install a Python executable into the `bin` directory.
+ #[arg(long, overrides_with("bin"), conflicts_with("default"))] + pub no_bin: bool, + /// The Python version(s) to install. /// /// If not provided, the requested Python version(s) will be read from the `UV_PYTHON` @@ -5003,7 +5016,7 @@ pub struct PythonInstallArgs { /// and `python`. /// /// If multiple Python versions are requested, uv will exit with an error. - #[arg(long)] + #[arg(long, conflicts_with("no_bin"))] pub default: bool, } diff --git a/crates/uv-python/src/windows_registry.rs b/crates/uv-python/src/windows_registry.rs index 69e179bbf..7c6f6f307 100644 --- a/crates/uv-python/src/windows_registry.rs +++ b/crates/uv-python/src/windows_registry.rs @@ -129,12 +129,13 @@ fn read_registry_entry(company: &str, tag: &str, tag_key: &Key) -> Option, ) -> Result<(), ManagedPep514Error> { let pointer_width = match installation.key().arch().family().pointer_width() { Ok(PointerWidth::U32) => 32, @@ -146,9 +147,7 @@ pub fn create_registry_entry( } }; - if let Err(err) = write_registry_entry(installation, pointer_width) { - errors.push((installation.key().clone(), err.into())); - } + write_registry_entry(installation, pointer_width)?; Ok(()) } diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index 8c8387d07..b22d6010e 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -135,6 +135,14 @@ impl Changelog { } } +#[derive(Debug, Clone, Copy)] +enum InstallErrorKind { + DownloadUnpack, + Bin, + #[cfg(windows)] + Registry, +} + /// Download and install Python versions. 
#[allow(clippy::fn_params_excessive_bools)] pub(crate) async fn install( @@ -143,6 +151,7 @@ pub(crate) async fn install( targets: Vec, reinstall: bool, upgrade: bool, + bin: Option, force: bool, python_install_mirror: Option, pypy_install_mirror: Option, @@ -432,12 +441,16 @@ pub(crate) async fn install( downloaded.push(installation.clone()); } Err(err) => { - errors.push((download.key().clone(), anyhow::Error::new(err))); + errors.push(( + InstallErrorKind::DownloadUnpack, + download.key().clone(), + anyhow::Error::new(err), + )); } } } - let bin = if preview.is_enabled() { + let bin_dir = if matches!(bin, Some(true)) || preview.is_enabled() { Some(python_executable_dir()?) } else { None @@ -460,7 +473,7 @@ pub(crate) async fn install( continue; } - let bin = bin + let bin_dir = bin_dir .as_ref() .expect("We should have a bin directory with preview enabled") .as_path(); @@ -468,27 +481,38 @@ pub(crate) async fn install( let upgradeable = (default || is_default_install) || requested_minor_versions.contains(&installation.key().version().python_version()); - create_bin_links( - installation, - bin, - reinstall, - force, - default, - upgradeable, - upgrade, - is_default_install, - first_request, - &existing_installations, - &installations, - &mut changelog, - &mut errors, - preview, - )?; + if !matches!(bin, Some(false)) { + create_bin_links( + installation, + bin_dir, + reinstall, + force, + default, + upgradeable, + upgrade, + is_default_install, + first_request, + &existing_installations, + &installations, + &mut changelog, + &mut errors, + preview, + ); + } if preview.is_enabled() { #[cfg(windows)] { - uv_python::windows_registry::create_registry_entry(installation, &mut errors)?; + match uv_python::windows_registry::create_registry_entry(installation) { + Ok(()) => {} + Err(err) => { + errors.push(( + InstallErrorKind::Registry, + installation.key().clone(), + err.into(), + )); + } + } } } } @@ -636,24 +660,47 @@ pub(crate) async fn install( } } - if 
preview.is_enabled() { - let bin = bin + if preview.is_enabled() && !matches!(bin, Some(false)) { + let bin_dir = bin_dir .as_ref() .expect("We should have a bin directory with preview enabled") .as_path(); - warn_if_not_on_path(bin); + warn_if_not_on_path(bin_dir); } } if !errors.is_empty() { - for (key, err) in errors + // If there are only bin install errors and the user didn't opt-in, we're only going to warn + let fatal = errors + .iter() + .all(|(kind, _, _)| matches!(kind, InstallErrorKind::Bin)) + && bin.is_none(); + + for (kind, key, err) in errors .into_iter() - .sorted_unstable_by(|(key_a, _), (key_b, _)| key_a.cmp(key_b)) + .sorted_unstable_by(|(_, key_a, _), (_, key_b, _)| key_a.cmp(key_b)) { + let (level, verb) = match kind { + InstallErrorKind::DownloadUnpack => ("error".red().bold().to_string(), "install"), + InstallErrorKind::Bin => { + let level = match bin { + None => "warning".yellow().bold().to_string(), + Some(false) => continue, + Some(true) => "error".red().bold().to_string(), + }; + (level, "install executable for") + } + #[cfg(windows)] + InstallErrorKind::Registry => ( + "error".red().bold().to_string(), + "install registry entry for", + ), + }; + writeln!( printer.stderr(), - "{}: Failed to install {}", - "error".red().bold(), + "{level}{} Failed to {verb} {}", + ":".bold(), key.green() )?; for err in err.chain() { @@ -665,6 +712,11 @@ pub(crate) async fn install( )?; } } + + if fatal { + return Ok(ExitStatus::Success); + } + return Ok(ExitStatus::Failure); } @@ -672,6 +724,8 @@ pub(crate) async fn install( } /// Link the binaries of a managed Python installation to the bin directory. +/// +/// This function is fallible, but errors are pushed to `errors` instead of being thrown. 
#[allow(clippy::fn_params_excessive_bools)] fn create_bin_links( installation: &ManagedPythonInstallation, @@ -686,9 +740,9 @@ fn create_bin_links( existing_installations: &[ManagedPythonInstallation], installations: &[&ManagedPythonInstallation], changelog: &mut Changelog, - errors: &mut Vec<(PythonInstallationKey, Error)>, + errors: &mut Vec<(InstallErrorKind, PythonInstallationKey, Error)>, preview: PreviewMode, -) -> Result<(), Error> { +) { let targets = if (default || is_default_install) && first_request.matches_installation(installation) { vec![ @@ -773,6 +827,7 @@ fn create_bin_links( ); } else { errors.push(( + InstallErrorKind::Bin, installation.key().clone(), anyhow::anyhow!( "Executable already exists at `{}` but is not managed by uv; use `--force` to replace it", @@ -848,7 +903,17 @@ fn create_bin_links( } // Replace the existing link - fs_err::remove_file(&to)?; + if let Err(err) = fs_err::remove_file(&to) { + errors.push(( + InstallErrorKind::Bin, + installation.key().clone(), + anyhow::anyhow!( + "Executable already exists at `{}` but could not be removed: {err}", + to.simplified_display() + ), + )); + continue; + } if let Some(existing) = existing { // Ensure we do not report installation of this executable for an existing @@ -860,7 +925,18 @@ fn create_bin_links( .remove(&target); } - create_link_to_executable(&target, executable)?; + if let Err(err) = create_link_to_executable(&target, executable) { + errors.push(( + InstallErrorKind::Bin, + installation.key().clone(), + anyhow::anyhow!( + "Failed to create link at `{}`: {err}", + target.simplified_display() + ), + )); + continue; + } + debug!( "Updated executable at `{}` to {}", target.simplified_display(), @@ -874,11 +950,14 @@ fn create_bin_links( .insert(target.clone()); } Err(err) => { - errors.push((installation.key().clone(), anyhow::Error::new(err))); + errors.push(( + InstallErrorKind::Bin, + installation.key().clone(), + anyhow::Error::new(err), + )); } } } - Ok(()) } pub(crate) fn 
format_executables( diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 0b4d0bb82..3a700b965 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1402,6 +1402,7 @@ async fn run(mut cli: Cli) -> Result { args.targets, args.reinstall, upgrade, + args.bin, args.force, args.python_install_mirror, args.pypy_install_mirror, @@ -1430,6 +1431,7 @@ async fn run(mut cli: Cli) -> Result { args.targets, reinstall, upgrade, + args.bin, args.force, args.python_install_mirror, args.pypy_install_mirror, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index 8a325d538..d373250ac 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -933,6 +933,7 @@ pub(crate) struct PythonInstallSettings { pub(crate) targets: Vec, pub(crate) reinstall: bool, pub(crate) force: bool, + pub(crate) bin: Option, pub(crate) python_install_mirror: Option, pub(crate) pypy_install_mirror: Option, pub(crate) python_downloads_json_url: Option, @@ -961,6 +962,8 @@ impl PythonInstallSettings { install_dir, targets, reinstall, + bin, + no_bin, force, mirror: _, pypy_mirror: _, @@ -973,6 +976,7 @@ impl PythonInstallSettings { targets, reinstall, force, + bin: flag(bin, no_bin, "bin"), python_install_mirror: python_mirror, pypy_install_mirror: pypy_mirror, python_downloads_json_url, @@ -992,6 +996,7 @@ pub(crate) struct PythonUpgradeSettings { pub(crate) pypy_install_mirror: Option, pub(crate) python_downloads_json_url: Option, pub(crate) default: bool, + pub(crate) bin: Option, } impl PythonUpgradeSettings { @@ -1013,6 +1018,7 @@ impl PythonUpgradeSettings { args.python_downloads_json_url.or(python_downloads_json_url); let force = false; let default = false; + let bin = None; let PythonUpgradeArgs { install_dir, @@ -1030,6 +1036,7 @@ impl PythonUpgradeSettings { pypy_install_mirror: pypy_mirror, python_downloads_json_url, default, + bin, } } } diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index 8faebd040..a6230108c 100644 --- 
a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -504,6 +504,9 @@ fn help_subsubcommand() { [env: UV_PYTHON_INSTALL_DIR=] + --no-bin + Do not install a Python executable into the `bin` directory + --mirror Set the URL to use as the source for downloading Python installations. @@ -790,6 +793,8 @@ fn help_flag_subsubcommand() { Options: -i, --install-dir The directory to store the Python installation in [env: UV_PYTHON_INSTALL_DIR=] + --no-bin + Do not install a Python executable into the `bin` directory --mirror Set the URL to use as the source for downloading Python installations [env: UV_PYTHON_INSTALL_MIRROR=] diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index bd723e5d1..0cb952054 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -430,15 +430,35 @@ fn python_install_preview() { bin_python.touch().unwrap(); uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: Failed to install executable for cpython-3.13.5-[PLATFORM] + Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it + "); + + // With `--bin`, this should error instead of warn + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--bin").arg("3.13"), @r" success: false exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to install cpython-3.13.5-[PLATFORM] + error: Failed to install executable for cpython-3.13.5-[PLATFORM] Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it "); + // With `--no-bin`, this should be silent + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + 
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--force").arg("3.13"), @r" success: true exit_code: 0 @@ -565,6 +585,52 @@ fn python_install_preview() { } } +#[test] +fn python_install_preview_no_bin() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install the latest version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] + "); + + let bin_python = context + .bin_dir + .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX)); + + // The executable should not be installed in the bin directory + bin_python.assert(predicate::path::missing()); + + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("--default"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: the argument '--no-bin' cannot be used with '--default' + + Usage: uv python install --no-bin --install-dir [TARGETS]... + + For more information, try '--help'. + "); + + let bin_python = context + .bin_dir + .child(format!("python{}", std::env::consts::EXE_SUFFIX)); + + // The executable should not be installed in the bin directory + bin_python.assert(predicate::path::missing()); +} + #[test] fn python_install_preview_upgrade() { let context = TestContext::new_with_versions(&[]) diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 13df63c19..93d928518 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -2795,7 +2795,8 @@ uv python install [OPTIONS] [TARGETS]...

    May also be set with the UV_PYTHON_INSTALL_MIRROR environment variable.

    --native-tls

    Whether to load TLS certificates from the platform's native certificate store.

    By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).

    However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.

    -

    May also be set with the UV_NATIVE_TLS environment variable.

    --no-cache, --no-cache-dir, -n

    Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation

    +

    May also be set with the UV_NATIVE_TLS environment variable.

    --no-bin

    Do not install a Python executable into the bin directory

    +
    --no-cache, --no-cache-dir, -n

    Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation

    May also be set with the UV_NO_CACHE environment variable.

    --no-config

    Avoid discovering configuration files (pyproject.toml, uv.toml).

    Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.

    May also be set with the UV_NO_CONFIG environment variable.

    --no-managed-python

    Disable use of uv-managed Python versions.

    From d2c81e503f19cf63bd335e8a08df1bf6b542dac0 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 12:29:11 -0500 Subject: [PATCH 035/130] Make preview Python registration on Windows non-fatal (#14614) Same as #14612 for registration with the Windows Registry. --- crates/uv-cli/src/lib.rs | 11 ++++++++ crates/uv/src/commands/python/install.rs | 32 ++++++++++++++---------- crates/uv/src/lib.rs | 2 ++ crates/uv/src/settings.rs | 7 ++++++ crates/uv/tests/it/help.rs | 5 ++++ docs/reference/cli.md | 1 + 6 files changed, 45 insertions(+), 13 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 70d5322d9..2efb30724 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -4954,6 +4954,17 @@ pub struct PythonInstallArgs { #[arg(long, overrides_with("bin"), conflicts_with("default"))] pub no_bin: bool, + /// Register the Python installation in the Windows registry. + /// + /// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the + /// registry entry cannot be created. + #[arg(long, overrides_with("no_registry"), hide = true)] + pub registry: bool, + + /// Do not register the Python installation in the Windows registry. + #[arg(long, overrides_with("registry"))] + pub no_registry: bool, + /// The Python version(s) to install. 
/// /// If not provided, the requested Python version(s) will be read from the `UV_PYTHON` diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index b22d6010e..bbab7cbb1 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -152,6 +152,7 @@ pub(crate) async fn install( reinstall: bool, upgrade: bool, bin: Option, + registry: Option, force: bool, python_install_mirror: Option, pypy_install_mirror: Option, @@ -500,7 +501,7 @@ pub(crate) async fn install( ); } - if preview.is_enabled() { + if preview.is_enabled() && !matches!(registry, Some(false)) { #[cfg(windows)] { match uv_python::windows_registry::create_registry_entry(installation) { @@ -670,11 +671,14 @@ pub(crate) async fn install( } if !errors.is_empty() { - // If there are only bin install errors and the user didn't opt-in, we're only going to warn - let fatal = errors - .iter() - .all(|(kind, _, _)| matches!(kind, InstallErrorKind::Bin)) - && bin.is_none(); + // If there are only side-effect install errors and the user didn't opt-in, we're only going + // to warn + let fatal = !errors.iter().all(|(kind, _, _)| match kind { + InstallErrorKind::Bin => bin.is_none(), + #[cfg(windows)] + InstallErrorKind::Registry => registry.is_none(), + InstallErrorKind::DownloadUnpack => false, + }); for (kind, key, err) in errors .into_iter() @@ -691,10 +695,14 @@ pub(crate) async fn install( (level, "install executable for") } #[cfg(windows)] - InstallErrorKind::Registry => ( - "error".red().bold().to_string(), - "install registry entry for", - ), + InstallErrorKind::Registry => { + let level = match registry { + None => "warning".yellow().bold().to_string(), + Some(false) => continue, + Some(true) => "error".red().bold().to_string(), + }; + (level, "install registry entry for") + } }; writeln!( @@ -714,10 +722,8 @@ pub(crate) async fn install( } if fatal { - return Ok(ExitStatus::Success); + return Ok(ExitStatus::Failure); } - - 
return Ok(ExitStatus::Failure); } Ok(ExitStatus::Success) diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 3a700b965..e6fea035f 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1403,6 +1403,7 @@ async fn run(mut cli: Cli) -> Result { args.reinstall, upgrade, args.bin, + args.registry, args.force, args.python_install_mirror, args.pypy_install_mirror, @@ -1432,6 +1433,7 @@ async fn run(mut cli: Cli) -> Result { reinstall, upgrade, args.bin, + args.registry, args.force, args.python_install_mirror, args.pypy_install_mirror, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index d373250ac..b221f0f5d 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -934,6 +934,7 @@ pub(crate) struct PythonInstallSettings { pub(crate) reinstall: bool, pub(crate) force: bool, pub(crate) bin: Option, + pub(crate) registry: Option, pub(crate) python_install_mirror: Option, pub(crate) pypy_install_mirror: Option, pub(crate) python_downloads_json_url: Option, @@ -964,6 +965,8 @@ impl PythonInstallSettings { reinstall, bin, no_bin, + registry, + no_registry, force, mirror: _, pypy_mirror: _, @@ -977,6 +980,7 @@ impl PythonInstallSettings { reinstall, force, bin: flag(bin, no_bin, "bin"), + registry: flag(registry, no_registry, "registry"), python_install_mirror: python_mirror, pypy_install_mirror: pypy_mirror, python_downloads_json_url, @@ -992,6 +996,7 @@ pub(crate) struct PythonUpgradeSettings { pub(crate) install_dir: Option, pub(crate) targets: Vec, pub(crate) force: bool, + pub(crate) registry: Option, pub(crate) python_install_mirror: Option, pub(crate) pypy_install_mirror: Option, pub(crate) python_downloads_json_url: Option, @@ -1019,6 +1024,7 @@ impl PythonUpgradeSettings { let force = false; let default = false; let bin = None; + let registry = None; let PythonUpgradeArgs { install_dir, @@ -1032,6 +1038,7 @@ impl PythonUpgradeSettings { install_dir, targets, force, + registry, python_install_mirror: 
python_mirror, pypy_install_mirror: pypy_mirror, python_downloads_json_url, diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index a6230108c..a557b0eff 100644 --- a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -507,6 +507,9 @@ fn help_subsubcommand() { --no-bin Do not install a Python executable into the `bin` directory + --no-registry + Do not register the Python installation in the Windows registry + --mirror Set the URL to use as the source for downloading Python installations. @@ -795,6 +798,8 @@ fn help_flag_subsubcommand() { The directory to store the Python installation in [env: UV_PYTHON_INSTALL_DIR=] --no-bin Do not install a Python executable into the `bin` directory + --no-registry + Do not register the Python installation in the Windows registry --mirror Set the URL to use as the source for downloading Python installations [env: UV_PYTHON_INSTALL_MIRROR=] diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 93d928518..f6bc028df 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -2804,6 +2804,7 @@ uv python install [OPTIONS] [TARGETS]...

    May also be set with the UV_NO_MANAGED_PYTHON environment variable.

    --no-progress

    Hide all progress outputs.

    For example, spinners or progress bars.

    May also be set with the UV_NO_PROGRESS environment variable.

    --no-python-downloads

    Disable automatic downloads of Python.

    +
    --no-registry

    Do not register the Python installation in the Windows registry

    --offline

    Disable network access.

    When disabled, uv will only use locally cached data and locally available files.

    May also be set with the UV_OFFLINE environment variable.

    --project project

    Run the command within the given project directory.

    From c226d66f35b17df57018015c5f9c21d446d51849 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 12:55:57 -0500 Subject: [PATCH 036/130] Rename "Dependency specifiers" section to exclude PEP 508 reference (#14631) --- docs/concepts/projects/dependencies.md | 8 ++++---- docs/js/extra.js | 2 ++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md index 2eabbf4dc..022db4d7e 100644 --- a/docs/concepts/projects/dependencies.md +++ b/docs/concepts/projects/dependencies.md @@ -808,12 +808,12 @@ Or, to opt-out of using an editable dependency in a workspace: $ uv add --no-editable ./path/foo ``` -## Dependency specifiers (PEP 508) +## Dependency specifiers -uv uses +uv uses standard [dependency specifiers](https://packaging.python.org/en/latest/specifications/dependency-specifiers/), -previously known as [PEP 508](https://peps.python.org/pep-0508/). A dependency specifier is composed -of, in order: +originally defined in [PEP 508](https://peps.python.org/pep-0508/). 
A dependency specifier is +composed of, in order: - The dependency name - The extras you want (optional) diff --git a/docs/js/extra.js b/docs/js/extra.js index bfb34c7fa..58a71e98d 100644 --- a/docs/js/extra.js +++ b/docs/js/extra.js @@ -78,6 +78,8 @@ document$.subscribe(function () { "concepts/projects/#building-projects": "concepts/projects/build/", "concepts/projects/#build-isolation": "concepts/projects/config/#build-isolation", + "concepts/projects/dependencies/#dependency-specifiers-pep-508": + "concepts/projects/dependencies/#dependency-specifiers", }; // The prefix for the site, see `site_dir` in `mkdocs.yml` From d5257202662773b6794b1b2de6c490dd1404d7b5 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 13:47:02 -0500 Subject: [PATCH 037/130] Add `uv python update-shell` (#14627) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Part of #14296 This is the same as `uv tool update-shell` but handles the case where the Python bin directory is configured to a different path. ``` ❯ UV_PYTHON_BIN_DIR=/tmp/foo cargo run -q -- python install --preview 3.13.3 Installed Python 3.13.3 in 1.75s + cpython-3.13.3-macos-aarch64-none warning: `/tmp/foo` is not on your PATH. To use installed Python executables, run `export PATH="/tmp/foo:$PATH"` or `uv python update-shell`. 
❯ UV_PYTHON_BIN_DIR=/tmp/foo cargo run -q -- python update-shell Created configuration file: /Users/zb/.zshenv Restart your shell to apply changes ❯ cat /Users/zb/.zshenv # uv export PATH="/tmp/foo:$PATH" ❯ UV_TOOL_BIN_DIR=/tmp/bar cargo run -q -- tool update-shell Updated configuration file: /Users/zb/.zshenv Restart your shell to apply changes ❯ cat /Users/zb/.zshenv # uv export PATH="/tmp/foo:$PATH" # uv export PATH="/tmp/bar:$PATH" ``` --- crates/uv-cli/src/lib.rs | 13 ++ crates/uv/src/commands/mod.rs | 1 + crates/uv/src/commands/python/install.rs | 23 ++- crates/uv/src/commands/python/mod.rs | 1 + crates/uv/src/commands/python/update_shell.rs | 153 ++++++++++++++++++ crates/uv/src/lib.rs | 6 + crates/uv/tests/it/help.rs | 35 ++-- docs/reference/cli.md | 65 ++++++++ 8 files changed, 274 insertions(+), 23 deletions(-) create mode 100644 crates/uv/src/commands/python/update_shell.rs diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 2efb30724..a846aec59 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -4856,6 +4856,19 @@ pub enum PythonCommand { /// Uninstall Python versions. Uninstall(PythonUninstallArgs), + + /// Ensure that the Python executable directory is on the `PATH`. + /// + /// If the Python executable directory is not present on the `PATH`, uv will attempt to add it to + /// the relevant shell configuration files. + /// + /// If the shell configuration files already include a blurb to add the executable directory to + /// the path, but the directory is not present on the `PATH`, uv will exit with an error. + /// + /// The Python executable directory is determined according to the XDG standard and can be + /// retrieved with `uv python dir --bin`. 
+ #[command(alias = "ensurepath")] + UpdateShell, } #[derive(Args)] diff --git a/crates/uv/src/commands/mod.rs b/crates/uv/src/commands/mod.rs index d1e647363..405aad955 100644 --- a/crates/uv/src/commands/mod.rs +++ b/crates/uv/src/commands/mod.rs @@ -38,6 +38,7 @@ pub(crate) use python::install::install as python_install; pub(crate) use python::list::list as python_list; pub(crate) use python::pin::pin as python_pin; pub(crate) use python::uninstall::uninstall as python_uninstall; +pub(crate) use python::update_shell::update_shell as python_update_shell; #[cfg(feature = "self-update")] pub(crate) use self_update::self_update; pub(crate) use tool::dir::dir as tool_dir; diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index bbab7cbb1..feb0cf7c7 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -993,20 +993,29 @@ fn warn_if_not_on_path(bin: &Path) { if !Shell::contains_path(bin) { if let Some(shell) = Shell::from_env() { if let Some(command) = shell.prepend_path(bin) { - warn_user!( - "`{}` is not on your PATH. To use the installed Python executable, run `{}`.", - bin.simplified_display().cyan(), - command.green(), - ); + if shell.supports_update() { + warn_user!( + "`{}` is not on your PATH. To use installed Python executables, run `{}` or `{}`.", + bin.simplified_display().cyan(), + command.green(), + "uv python update-shell".green() + ); + } else { + warn_user!( + "`{}` is not on your PATH. To use installed Python executables, run `{}`.", + bin.simplified_display().cyan(), + command.green() + ); + } } else { warn_user!( - "`{}` is not on your PATH. To use the installed Python executable, add the directory to your PATH.", + "`{}` is not on your PATH. To use installed Python executables, add the directory to your PATH.", bin.simplified_display().cyan(), ); } } else { warn_user!( - "`{}` is not on your PATH. 
To use the installed Python executable, add the directory to your PATH.", + "`{}` is not on your PATH. To use installed Python executables, add the directory to your PATH.", bin.simplified_display().cyan(), ); } diff --git a/crates/uv/src/commands/python/mod.rs b/crates/uv/src/commands/python/mod.rs index afc700d23..6f7a5c980 100644 --- a/crates/uv/src/commands/python/mod.rs +++ b/crates/uv/src/commands/python/mod.rs @@ -4,6 +4,7 @@ pub(crate) mod install; pub(crate) mod list; pub(crate) mod pin; pub(crate) mod uninstall; +pub(crate) mod update_shell; #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub(super) enum ChangeEventKind { diff --git a/crates/uv/src/commands/python/update_shell.rs b/crates/uv/src/commands/python/update_shell.rs new file mode 100644 index 000000000..18757ff9e --- /dev/null +++ b/crates/uv/src/commands/python/update_shell.rs @@ -0,0 +1,153 @@ +#![cfg_attr(windows, allow(unreachable_code))] + +use std::fmt::Write; + +use anyhow::Result; +use owo_colors::OwoColorize; +use tokio::io::AsyncWriteExt; +use tracing::debug; + +use uv_fs::Simplified; +use uv_python::managed::python_executable_dir; +use uv_shell::Shell; + +use crate::commands::ExitStatus; +use crate::printer::Printer; + +/// Ensure that the executable directory is in PATH. +pub(crate) async fn update_shell(printer: Printer) -> Result { + let executable_directory = python_executable_dir()?; + debug!( + "Ensuring that the executable directory is in PATH: {}", + executable_directory.simplified_display() + ); + + #[cfg(windows)] + { + if uv_shell::windows::prepend_path(&executable_directory)? 
{ + writeln!( + printer.stderr(), + "Updated PATH to include executable directory {}", + executable_directory.simplified_display().cyan() + )?; + writeln!(printer.stderr(), "Restart your shell to apply changes")?; + } else { + writeln!( + printer.stderr(), + "Executable directory {} is already in PATH", + executable_directory.simplified_display().cyan() + )?; + } + + return Ok(ExitStatus::Success); + } + + if Shell::contains_path(&executable_directory) { + writeln!( + printer.stderr(), + "Executable directory {} is already in PATH", + executable_directory.simplified_display().cyan() + )?; + return Ok(ExitStatus::Success); + } + + // Determine the current shell. + let Some(shell) = Shell::from_env() else { + return Err(anyhow::anyhow!( + "The executable directory {} is not in PATH, but the current shell could not be determined", + executable_directory.simplified_display().cyan() + )); + }; + + // Look up the configuration files (e.g., `.bashrc`, `.zshrc`) for the shell. + let files = shell.configuration_files(); + if files.is_empty() { + return Err(anyhow::anyhow!( + "The executable directory {} is not in PATH, but updating {shell} is currently unsupported", + executable_directory.simplified_display().cyan() + )); + } + + // Prepare the command (e.g., `export PATH="$HOME/.cargo/bin:$PATH"`). + let Some(command) = shell.prepend_path(&executable_directory) else { + return Err(anyhow::anyhow!( + "The executable directory {} is not in PATH, but the necessary command to update {shell} could not be determined", + executable_directory.simplified_display().cyan() + )); + }; + + // Update each file, as necessary. + let mut updated = false; + for file in files { + // Search for the command in the file, to avoid redundant updates. 
+ match fs_err::tokio::read_to_string(&file).await { + Ok(contents) => { + if contents + .lines() + .map(str::trim) + .filter(|line| !line.starts_with('#')) + .any(|line| line.contains(&command)) + { + debug!( + "Skipping already-updated configuration file: {}", + file.simplified_display() + ); + continue; + } + + // Append the command to the file. + fs_err::tokio::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(&file) + .await? + .write_all(format!("{contents}\n# uv\n{command}\n").as_bytes()) + .await?; + + writeln!( + printer.stderr(), + "Updated configuration file: {}", + file.simplified_display().cyan() + )?; + updated = true; + } + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + // Ensure that the directory containing the file exists. + if let Some(parent) = file.parent() { + fs_err::tokio::create_dir_all(&parent).await?; + } + + // Append the command to the file. + fs_err::tokio::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(&file) + .await? 
+ .write_all(format!("# uv\n{command}\n").as_bytes()) + .await?; + + writeln!( + printer.stderr(), + "Created configuration file: {}", + file.simplified_display().cyan() + )?; + updated = true; + } + Err(err) => { + return Err(err.into()); + } + } + } + + if updated { + writeln!(printer.stderr(), "Restart your shell to apply changes")?; + Ok(ExitStatus::Success) + } else { + Err(anyhow::anyhow!( + "The executable directory {} is not in PATH, but the {shell} configuration files are already up-to-date", + executable_directory.simplified_display().cyan() + )) + } +} diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index e6fea035f..384f48ac4 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1537,6 +1537,12 @@ async fn run(mut cli: Cli) -> Result { commands::python_dir(args.bin)?; Ok(ExitStatus::Success) } + Commands::Python(PythonNamespace { + command: PythonCommand::UpdateShell, + }) => { + commands::python_update_shell(printer).await?; + Ok(ExitStatus::Success) + } Commands::Publish(args) => { show_settings!(args); diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index a557b0eff..39de4c6f9 100644 --- a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -290,14 +290,15 @@ fn help_subcommand() { Usage: uv python [OPTIONS] Commands: - list List the available Python installations - install Download and install Python versions - upgrade Upgrade installed Python versions to the latest supported patch release (requires the - `--preview` flag) - find Search for a Python installation - pin Pin to a specific Python version - dir Show the uv Python installation directory - uninstall Uninstall Python versions + list List the available Python installations + install Download and install Python versions + upgrade Upgrade installed Python versions to the latest supported patch release (requires + the `--preview` flag) + find Search for a Python installation + pin Pin to a specific Python version + dir Show the uv Python 
installation directory + uninstall Uninstall Python versions + update-shell Ensure that the Python executable directory is on the `PATH` Cache options: -n, --no-cache @@ -725,14 +726,15 @@ fn help_flag_subcommand() { Usage: uv python [OPTIONS] Commands: - list List the available Python installations - install Download and install Python versions - upgrade Upgrade installed Python versions to the latest supported patch release (requires the - `--preview` flag) - find Search for a Python installation - pin Pin to a specific Python version - dir Show the uv Python installation directory - uninstall Uninstall Python versions + list List the available Python installations + install Download and install Python versions + upgrade Upgrade installed Python versions to the latest supported patch release (requires + the `--preview` flag) + find Search for a Python installation + pin Pin to a specific Python version + dir Show the uv Python installation directory + uninstall Uninstall Python versions + update-shell Ensure that the Python executable directory is on the `PATH` Cache options: -n, --no-cache Avoid reading from or writing to the cache, instead using a temporary @@ -934,6 +936,7 @@ fn help_unknown_subsubcommand() { pin dir uninstall + update-shell "); } diff --git a/docs/reference/cli.md b/docs/reference/cli.md index f6bc028df..66c46ae0c 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -2633,6 +2633,7 @@ uv python [OPTIONS]
    uv python pin

    Pin to a specific Python version

    uv python dir

    Show the uv Python installation directory

    uv python uninstall

    Uninstall Python versions

    +
    uv python update-shell

    Ensure that the Python executable directory is on the PATH

    ### uv python list @@ -3206,6 +3207,70 @@ uv python uninstall [OPTIONS] ...

    You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

    +### uv python update-shell + +Ensure that the Python executable directory is on the `PATH`. + +If the Python executable directory is not present on the `PATH`, uv will attempt to add it to the relevant shell configuration files. + +If the shell configuration files already include a blurb to add the executable directory to the path, but the directory is not present on the `PATH`, uv will exit with an error. + +The Python executable directory is determined according to the XDG standard and can be retrieved with `uv python dir --bin`. + +

    Usage

    + +``` +uv python update-shell [OPTIONS] +``` + +

    Options

    + +
    --allow-insecure-host, --trusted-host allow-insecure-host

    Allow insecure connections to a host.

    +

    Can be provided multiple times.

    +

    Expects to receive either a hostname (e.g., localhost), a host-port pair (e.g., localhost:8080), or a URL (e.g., https://localhost).

    +

    WARNING: Hosts included in this list will not be verified against the system's certificate store. Only use --allow-insecure-host in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.

    +

    May also be set with the UV_INSECURE_HOST environment variable.

    --cache-dir cache-dir

    Path to the cache directory.

    +

    Defaults to $XDG_CACHE_HOME/uv or $HOME/.cache/uv on macOS and Linux, and %LOCALAPPDATA%\uv\cache on Windows.

    +

    To view the location of the cache directory, run uv cache dir.

    +

    May also be set with the UV_CACHE_DIR environment variable.

    --color color-choice

    Control the use of color in output.

    +

    By default, uv will automatically detect support for colors when writing to a terminal.

    +

    Possible values:

    +
      +
    • auto: Enables colored output only when the output is going to a terminal or TTY with support
    • +
    • always: Enables colored output regardless of the detected environment
    • +
    • never: Disables colored output
    • +
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    +

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    +

    May also be set with the UV_CONFIG_FILE environment variable.

    --directory directory

    Change to the given directory prior to running the command.

    +

    Relative paths are resolved with the given directory as the base.

    +

    See --project to only change the project root directory.

    +
    --help, -h

    Display the concise help for this command

    +
    --managed-python

    Require use of uv-managed Python versions.

    +

    By default, uv prefers using Python versions it manages. However, it will use system Python versions if a uv-managed Python is not installed. This option disables use of system Python versions.

    +

    May also be set with the UV_MANAGED_PYTHON environment variable.

    --native-tls

    Whether to load TLS certificates from the platform's native certificate store.

    +

    By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).

    +

    However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.

    +

    May also be set with the UV_NATIVE_TLS environment variable.

    --no-cache, --no-cache-dir, -n

    Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation

    +

    May also be set with the UV_NO_CACHE environment variable.

    --no-config

    Avoid discovering configuration files (pyproject.toml, uv.toml).

    +

    Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.

    +

    May also be set with the UV_NO_CONFIG environment variable.

    --no-managed-python

    Disable use of uv-managed Python versions.

    +

    Instead, uv will search for a suitable Python version on the system.

    +

    May also be set with the UV_NO_MANAGED_PYTHON environment variable.

    --no-progress

    Hide all progress outputs.

    +

    For example, spinners or progress bars.

    +

    May also be set with the UV_NO_PROGRESS environment variable.

    --no-python-downloads

    Disable automatic downloads of Python.

    +
    --offline

    Disable network access.

    +

    When disabled, uv will only use locally cached data and locally available files.

    +

    May also be set with the UV_OFFLINE environment variable.

    --project project

    Run the command within the given project directory.

    +

    All pyproject.toml, uv.toml, and .python-version files will be discovered by walking up the directory tree from the project root, as will the project's virtual environment (.venv).

    +

    Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.

    +

    See --directory to change the working directory entirely.

    +

    This setting has no effect when used in the uv pip interface.

    +

    May also be set with the UV_PROJECT environment variable.

    --quiet, -q

    Use quiet output.

    +

    Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.

    +
    --verbose, -v

    Use verbose output.

    +

    You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

    +
    + ## uv pip Manage Python packages with a pip-compatible interface From ab2bd0179bac888188b69524a20f8d336e439a64 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 14:35:54 -0500 Subject: [PATCH 038/130] Mention the `revision` in the lockfile versioning doc (#14634) --- docs/concepts/resolution.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/concepts/resolution.md b/docs/concepts/resolution.md index ec28d71a3..e857e7b1d 100644 --- a/docs/concepts/resolution.md +++ b/docs/concepts/resolution.md @@ -535,3 +535,7 @@ The schema version is considered part of the public API, and so is only bumped i a breaking change (see [Versioning](../reference/policies/versioning.md)). As such, all uv patch versions within a given minor uv release are guaranteed to have full lockfile compatibility. In other words, lockfiles may only be rejected across minor releases. + +The `revision` field of the lockfile is used to track backwards compatible changes to the lockfile. +For example, adding a new field to distributions. Changes to the revision will not cause older +versions of uv to error. From 863e73a841ca6360a611196e9c5e3ccee894f9d8 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 16:47:35 -0500 Subject: [PATCH 039/130] Skip Windows Python interpreters that return a broken MSIX package code (#14636) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Currently we treat all spawn failures as fatal, because they indicate a broken interpreter. In this case, I think we should just skip these broken interpreters — though I don't know the root cause of why it's broken yet. 
Closes https://github.com/astral-sh/uv/issues/14637 See https://discord.com/channels/1039017663004942429/1039017663512449056/1394758502647333025 --- crates/uv-python/src/discovery.rs | 8 ++++++++ crates/uv-python/src/interpreter.rs | 31 +++++++++++++++++++++++++---- 2 files changed, 35 insertions(+), 4 deletions(-) diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs index 67f8f37ff..c067082dd 100644 --- a/crates/uv-python/src/discovery.rs +++ b/crates/uv-python/src/discovery.rs @@ -884,6 +884,14 @@ impl Error { ); false } + #[cfg(windows)] + InterpreterError::CorruptWindowsPackage { path, err } => { + debug!( + "Skipping bad interpreter at {} from {source}: {err}", + path.display() + ); + false + } InterpreterError::NotFound(path) | InterpreterError::BrokenSymlink(BrokenSymlink { path, .. }) => { // If the interpreter is from an active, valid virtual environment, we should diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs index 0f074ebb6..fc5adb833 100644 --- a/crates/uv-python/src/interpreter.rs +++ b/crates/uv-python/src/interpreter.rs @@ -34,6 +34,9 @@ use crate::{ VirtualEnvironment, }; +#[cfg(windows)] +use windows_sys::Win32::Foundation::{APPMODEL_ERROR_NO_PACKAGE, ERROR_CANT_ACCESS_FILE}; + /// A Python executable and its associated platform markers. 
#[derive(Debug, Clone)] pub struct Interpreter { @@ -760,6 +763,13 @@ pub enum Error { #[source] err: io::Error, }, + #[cfg(windows)] + #[error("Failed to query Python interpreter at `{path}`")] + CorruptWindowsPackage { + path: PathBuf, + #[source] + err: io::Error, + }, #[error("{0}")] UnexpectedResponse(UnexpectedResponseError), #[error("{0}")] @@ -872,10 +882,23 @@ impl InterpreterInfo { .arg("-c") .arg(script) .output() - .map_err(|err| Error::SpawnFailed { - path: interpreter.to_path_buf(), - err, - })?; + .map_err( + |err| match err.raw_os_error().and_then(|code| u32::try_from(code).ok()) { + // These error codes are returned if the Python interpreter is a corrupt MSIX + // package, which we want to differentiate from a typical spawn failure. + #[cfg(windows)] + Some(APPMODEL_ERROR_NO_PACKAGE | ERROR_CANT_ACCESS_FILE) => { + Error::CorruptWindowsPackage { + path: interpreter.to_path_buf(), + err, + } + } + _ => Error::SpawnFailed { + path: interpreter.to_path_buf(), + err, + }, + }, + )?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); From 8d6d0678a71d86020caaf20107b1e81af29f471d Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 15 Jul 2025 16:47:43 -0500 Subject: [PATCH 040/130] Move "Conflicting dependencies" to the "Resolution" page (#14633) --- docs/concepts/projects/config.md | 121 +++++++++++-------------------- docs/concepts/resolution.md | 81 +++++++++++++++++++-- 2 files changed, 118 insertions(+), 84 deletions(-) diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md index f9d33ed90..8efb667a1 100644 --- a/docs/concepts/projects/config.md +++ b/docs/concepts/projects/config.md @@ -196,41 +196,6 @@ To target this environment, you'd export `UV_PROJECT_ENVIRONMENT=/usr/local`. environment. The `--active` flag can be used to opt-in to respecting `VIRTUAL_ENV`. The `--no-active` flag can be used to silence the warning. 
-## Limited resolution environments - -If your project supports a more limited set of platforms or Python versions, you can constrain the -set of solved platforms via the `environments` setting, which accepts a list of PEP 508 environment -markers. For example, to constrain the lockfile to macOS and Linux, and exclude Windows: - -```toml title="pyproject.toml" -[tool.uv] -environments = [ - "sys_platform == 'darwin'", - "sys_platform == 'linux'", -] -``` - -See the [resolution documentation](../resolution.md#limited-resolution-environments) for more. - -## Required environments - -If your project _must_ support a specific platform or Python version, you can mark that platform as -required via the `required-environments` setting. For example, to require that the project supports -Intel macOS: - -```toml title="pyproject.toml" -[tool.uv] -required-environments = [ - "sys_platform == 'darwin' and platform_machine == 'x86_64'", -] -``` - -The `required-environments` setting is only relevant for packages that do not publish a source -distribution (like PyTorch), as such packages can _only_ be installed on environments covered by the -set of pre-built binary distributions (wheels) published by that package. - -See the [resolution documentation](../resolution.md#required-environments) for more. - ## Build isolation By default, uv builds all packages in isolated virtual environments, as per @@ -401,33 +366,12 @@ in the deployed environment without a dependency on the originating source code. ## Conflicting dependencies -uv requires that all optional dependencies ("extras") declared by the project are compatible with -each other and resolves all optional dependencies together when creating the lockfile. +uv requires resolves all project dependencies together, including optional dependencies ("extras") +and dependency groups. 
If dependencies declared in one section are not compatible with those in +another section, uv will fail to resolve the requirements of the project with an error. -If optional dependencies declared in one extra are not compatible with those in another extra, uv -will fail to resolve the requirements of the project with an error. - -To work around this, uv supports declaring conflicting extras. For example, consider two sets of -optional dependencies that conflict with one another: - -```toml title="pyproject.toml" -[project.optional-dependencies] -extra1 = ["numpy==2.1.2"] -extra2 = ["numpy==2.0.0"] -``` - -If you run `uv lock` with the above dependencies, resolution will fail: - -```console -$ uv lock - x No solution found when resolving dependencies: - `-> Because myproject[extra2] depends on numpy==2.0.0 and myproject[extra1] depends on numpy==2.1.2, we can conclude that myproject[extra1] and - myproject[extra2] are incompatible. - And because your project requires myproject[extra1] and myproject[extra2], we can conclude that your projects's requirements are unsatisfiable. -``` - -But if you specify that `extra1` and `extra2` are conflicting, uv will resolve them separately. -Specify conflicts in the `tool.uv` section: +uv supports explicit declaration of conflicting dependency groups. For example, to declare that the +`optional-dependency` groups `extra1` and `extra2` are incompatible: ```toml title="pyproject.toml" [tool.uv] @@ -439,25 +383,9 @@ conflicts = [ ] ``` -Now, running `uv lock` will succeed. Note though, that now you cannot install both `extra1` and -`extra2` at the same time: - -```console -$ uv sync --extra extra1 --extra extra2 -Resolved 3 packages in 14ms -error: extra `extra1`, extra `extra2` are incompatible with the declared conflicts: {`myproject[extra1]`, `myproject[extra2]`} -``` - -This error occurs because installing both `extra1` and `extra2` would result in installing two -different versions of a package into the same environment. 
- -The above strategy for dealing with conflicting extras also works with dependency groups: +Or, to declare the development dependency groups `group1` and `group2` incompatible: ```toml title="pyproject.toml" -[dependency-groups] -group1 = ["numpy==2.1.2"] -group2 = ["numpy==2.0.0"] - [tool.uv] conflicts = [ [ @@ -467,4 +395,39 @@ conflicts = [ ] ``` -The only difference with conflicting extras is that you need to use `group` instead of `extra`. +See the [resolution documentation](../resolution.md#conflicting-dependencies) for more. + +## Limited resolution environments + +If your project supports a more limited set of platforms or Python versions, you can constrain the +set of solved platforms via the `environments` setting, which accepts a list of PEP 508 environment +markers. For example, to constrain the lockfile to macOS and Linux, and exclude Windows: + +```toml title="pyproject.toml" +[tool.uv] +environments = [ + "sys_platform == 'darwin'", + "sys_platform == 'linux'", +] +``` + +See the [resolution documentation](../resolution.md#limited-resolution-environments) for more. + +## Required environments + +If your project _must_ support a specific platform or Python version, you can mark that platform as +required via the `required-environments` setting. For example, to require that the project supports +Intel macOS: + +```toml title="pyproject.toml" +[tool.uv] +required-environments = [ + "sys_platform == 'darwin' and platform_machine == 'x86_64'", +] +``` + +The `required-environments` setting is only relevant for packages that do not publish a source +distribution (like PyTorch), as such packages can _only_ be installed on environments covered by the +set of pre-built binary distributions (wheels) published by that package. + +See the [resolution documentation](../resolution.md#required-environments) for more. 
diff --git a/docs/concepts/resolution.md b/docs/concepts/resolution.md index e857e7b1d..278289ea9 100644 --- a/docs/concepts/resolution.md +++ b/docs/concepts/resolution.md @@ -453,6 +453,77 @@ though only `name`, `version`, `requires-dist`, `requires-python`, and `provides uv. The `version` field is also considered optional. If omitted, the metadata will be used for all versions of the specified package. +## Conflicting dependencies + +uv requires that all optional dependencies ("extras") declared by the project are compatible with +each other and resolves all optional dependencies together when creating the lockfile. + +If optional dependencies declared in one extra are not compatible with those in another extra, uv +will fail to resolve the requirements of the project with an error. + +To work around this, uv supports declaring conflicting extras. For example, consider two sets of +optional dependencies that conflict with one another: + +```toml title="pyproject.toml" +[project.optional-dependencies] +extra1 = ["numpy==2.1.2"] +extra2 = ["numpy==2.0.0"] +``` + +If you run `uv lock` with the above dependencies, resolution will fail: + +```console +$ uv lock + x No solution found when resolving dependencies: + `-> Because myproject[extra2] depends on numpy==2.0.0 and myproject[extra1] depends on numpy==2.1.2, we can conclude that myproject[extra1] and + myproject[extra2] are incompatible. + And because your project requires myproject[extra1] and myproject[extra2], we can conclude that your projects's requirements are unsatisfiable. +``` + +But if you specify that `extra1` and `extra2` are conflicting, uv will resolve them separately. +Specify conflicts in the `tool.uv` section: + +```toml title="pyproject.toml" +[tool.uv] +conflicts = [ + [ + { extra = "extra1" }, + { extra = "extra2" }, + ], +] +``` + +Now, running `uv lock` will succeed. 
Note though, that now you cannot install both `extra1` and +`extra2` at the same time: + +```console +$ uv sync --extra extra1 --extra extra2 +Resolved 3 packages in 14ms +error: extra `extra1`, extra `extra2` are incompatible with the declared conflicts: {`myproject[extra1]`, `myproject[extra2]`} +``` + +This error occurs because installing both `extra1` and `extra2` would result in installing two +different versions of a package into the same environment. + +The above strategy for dealing with conflicting extras also works with dependency groups: + +```toml title="pyproject.toml" +[dependency-groups] +group1 = ["numpy==2.1.2"] +group2 = ["numpy==2.0.0"] + +[tool.uv] +conflicts = [ + [ + { group = "group1" }, + { group = "group2" }, + ], +] +``` + +The only difference from conflicting extras is that you need to use the `group` key instead of +`extra`. + ## Lower bounds By default, `uv add` adds lower bounds to dependencies and, when using uv to manage projects, uv @@ -513,11 +584,6 @@ reading and extracting archives in the following formats: - lzma tarball (`.tar.lzma`) - zip (`.zip`) -## Learn more - -For more details about the internals of the resolver, see the -[resolver reference](../reference/resolver-internals.md) documentation. - ## Lockfile versioning The `uv.lock` file uses a versioned schema. The schema version is included in the `version` field of @@ -539,3 +605,8 @@ other words, lockfiles may only be rejected across minor releases. The `revision` field of the lockfile is used to track backwards compatible changes to the lockfile. For example, adding a new field to distributions. Changes to the revision will not cause older versions of uv to error. + +## Learn more + +For more details about the internals of the resolver, see the +[resolver reference](../reference/resolver-internals.md) documentation. 
From 861f7a1c42e366eec0529ef98eeed06665ccba6c Mon Sep 17 00:00:00 2001 From: Gilles Peiffer Date: Wed, 16 Jul 2025 15:44:29 +0200 Subject: [PATCH 041/130] docs: add missing backtick (#14654) Subject is message :) --- docs/pip/packages.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/pip/packages.md b/docs/pip/packages.md index a47b1aa0e..cdce527b6 100644 --- a/docs/pip/packages.md +++ b/docs/pip/packages.md @@ -128,7 +128,7 @@ $ uv pip install --group some/path/pyproject.toml:foo --group other/pyproject.to !!! note - As in pip, `--group` flags do not apply to other sources specified with flags like `-r` or -e`. + As in pip, `--group` flags do not apply to other sources specified with flags like `-r` or `-e`. For instance, `uv pip install -r some/path/pyproject.toml --group foo` sources `foo` from `./pyproject.toml` and **not** `some/path/pyproject.toml`. From 03de6c36e34032a754fd49ecbd300246954d8042 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Wed, 16 Jul 2025 09:48:16 -0400 Subject: [PATCH 042/130] Warn on invalid `uv.toml` when provided via direct path (#14653) ## Summary We validate the `uv.toml` when it's discovered automatically, but not when provided via `--config-file`. The same limitations exist, though -- I think the lack of enforcement is just an oversight. Closes https://github.com/astral-sh/uv/issues/14650. --- crates/uv-settings/src/lib.rs | 7 ++++++- crates/uv/tests/it/pip_install.rs | 26 +++++++++++++++++++++++++- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs index 54ae4e261..d676cc060 100644 --- a/crates/uv-settings/src/lib.rs +++ b/crates/uv-settings/src/lib.rs @@ -170,7 +170,12 @@ impl FilesystemOptions { /// Load a [`FilesystemOptions`] from a `uv.toml` file. 
pub fn from_file(path: impl AsRef) -> Result { - Ok(Self(read_file(path.as_ref())?)) + let path = path.as_ref(); + tracing::debug!("Reading user configuration from: `{}`", path.display()); + + let options = read_file(path)?; + validate_uv_toml(path, &options)?; + Ok(Self(options)) } } diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index bc27228c7..123d9066b 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -267,7 +267,7 @@ fn invalid_toml_filename() -> Result<()> { } #[test] -fn invalid_uv_toml_option_disallowed() -> Result<()> { +fn invalid_uv_toml_option_disallowed_automatic_discovery() -> Result<()> { let context = TestContext::new("3.12"); let uv_toml = context.temp_dir.child("uv.toml"); uv_toml.write_str(indoc! {r" @@ -288,6 +288,30 @@ fn invalid_uv_toml_option_disallowed() -> Result<()> { Ok(()) } +#[test] +fn invalid_uv_toml_option_disallowed_command_line() -> Result<()> { + let context = TestContext::new("3.12"); + let uv_toml = context.temp_dir.child("foo.toml"); + uv_toml.write_str(indoc! {r" + managed = true + "})?; + + uv_snapshot!(context.pip_install() + .arg("iniconfig") + .arg("--config-file") + .arg("foo.toml"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to parse: `foo.toml`. The `managed` field is not allowed in a `uv.toml` file. `managed` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead. 
+ " + ); + + Ok(()) +} + #[test] fn cache_uv_toml_credentials() -> Result<()> { let context = TestContext::new("3.12"); From e547527587fc47d3c83b200afa0d6bd15897081a Mon Sep 17 00:00:00 2001 From: Nathan Cain <13713501+nathanscain@users.noreply.github.com> Date: Wed, 16 Jul 2025 08:52:17 -0500 Subject: [PATCH 043/130] Add UV_LIBC to allow libc selection in multi-libc environment (#14646) Closes #14262 ## Description Adds `UV_LIBC` environment variable and implements check within `Libc::from_env` as recommended here: https://github.com/astral-sh/uv/issues/14262#issuecomment-3014600313 Gave this a few passes to make sure I follow dev practices within uv as best I am able. Feel free to call out anything that could be improved. ## Test Plan Planned to simply run existing test suite. Open to adding more tests once implementation is validated due to my limited Rust experience. --- crates/uv-python/src/platform.rs | 42 +++++++++++++++++++++----------- crates/uv-static/src/env_vars.rs | 4 +++ docs/reference/environment.md | 5 ++++ 3 files changed, 37 insertions(+), 14 deletions(-) diff --git a/crates/uv-python/src/platform.rs b/crates/uv-python/src/platform.rs index ce8620ae2..606e05e28 100644 --- a/crates/uv-python/src/platform.rs +++ b/crates/uv-python/src/platform.rs @@ -5,6 +5,8 @@ use std::ops::Deref; use std::{fmt, str::FromStr}; use thiserror::Error; +use uv_static::EnvVars; + #[derive(Error, Debug)] pub enum Error { #[error("Unknown operating system: {0}")] @@ -15,6 +17,8 @@ pub enum Error { UnknownLibc(String), #[error("Unsupported variant `{0}` for architecture `{1}`")] UnsupportedVariant(String, String), + #[error(transparent)] + LibcDetectionError(#[from] LibcDetectionError), } /// Architecture variants, e.g., with support for different instruction sets @@ -95,22 +99,32 @@ pub enum Libc { } impl Libc { - pub(crate) fn from_env() -> Result { + pub(crate) fn from_env() -> Result { match std::env::consts::OS { - "linux" => Ok(Self::Some(match detect_linux_libc()? 
{ - LibcVersion::Manylinux { .. } => match std::env::consts::ARCH { - // Checks if the CPU supports hardware floating-point operations. - // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment. - // download-metadata.json only includes armv7. - "arm" | "armv5te" | "armv7" => match detect_hardware_floating_point_support() { - Ok(true) => target_lexicon::Environment::Gnueabihf, - Ok(false) => target_lexicon::Environment::Gnueabi, - Err(_) => target_lexicon::Environment::Gnu, + "linux" => { + if let Ok(libc) = std::env::var(EnvVars::UV_LIBC) { + if !libc.is_empty() { + return Self::from_str(&libc); + } + } + + Ok(Self::Some(match detect_linux_libc()? { + LibcVersion::Manylinux { .. } => match std::env::consts::ARCH { + // Checks if the CPU supports hardware floating-point operations. + // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment. + // download-metadata.json only includes armv7. + "arm" | "armv5te" | "armv7" => { + match detect_hardware_floating_point_support() { + Ok(true) => target_lexicon::Environment::Gnueabihf, + Ok(false) => target_lexicon::Environment::Gnueabi, + Err(_) => target_lexicon::Environment::Gnu, + } + } + _ => target_lexicon::Environment::Gnu, }, - _ => target_lexicon::Environment::Gnu, - }, - LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl, - })), + LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl, + })) + } "windows" | "macos" => Ok(Self::None), // Use `None` on platforms without explicit support. _ => Ok(Self::None), diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index 5b91fccea..ae981cac3 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -154,6 +154,10 @@ impl EnvVars { /// `--no-python-downloads` option. Whether uv should allow Python downloads. 
pub const UV_PYTHON_DOWNLOADS: &'static str = "UV_PYTHON_DOWNLOADS"; + /// Overrides the environment-determined libc on linux systems when filling in the current platform + /// within Python version requests. Options are: `gnu`, `gnueabi`, `gnueabihf`, `musl`, and `none`. + pub const UV_LIBC: &'static str = "UV_LIBC"; + /// Equivalent to the `--compile-bytecode` command-line argument. If set, uv /// will compile Python source files to bytecode after installation. pub const UV_COMPILE_BYTECODE: &'static str = "UV_COMPILE_BYTECODE"; diff --git a/docs/reference/environment.md b/docs/reference/environment.md index bf8bf29ec..47e4d8db9 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -167,6 +167,11 @@ Defaults to `~/.local/bin`. Equivalent to the `--keyring-provider` command-line argument. If set, uv will use this value as the keyring provider. +### `UV_LIBC` + +Overrides the environment-determined libc on linux systems when filling in the current platform +within Python version requests. Options are: `gnu`, `gnueabi`, `gnueabihf`, `musl`, and `none`. + ### `UV_LINK_MODE` Equivalent to the `--link-mode` command-line argument. If set, uv will use this as From 0cf5ecf8413c54d7607acdf67cc41f8285f291ed Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 09:04:58 -0500 Subject: [PATCH 044/130] Request arm64 Python in aarch64-windows smoke test (#14655) The Python interpreter selected by `py` recently changed to x64 instead of arm64. 
Closes https://github.com/astral-sh/uv/pull/14652 See https://github.com/astral-sh/uv/pull/14652 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0ccc9ea4e..bb357f4a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2261,7 +2261,7 @@ jobs: name: uv-windows-aarch64-${{ github.sha }} - name: "Validate global Python install" - run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe + run: py -3.13-arm64 ./scripts/check_system_python.py --uv ./uv.exe # Test our PEP 514 integration that installs Python into the Windows registry. system-test-windows-registry: From 1b2f212e8b2f91069b858cb7f5905589c9d15add Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 09:05:10 -0500 Subject: [PATCH 045/130] Use `[PYTHON]` placeholder in filtered Python names (#14640) We should never replace with a non-placeholder, it is very confusing when trying to understand test behavior --- crates/uv/tests/it/common/mod.rs | 15 ++++- crates/uv/tests/it/pip_sync.rs | 50 ++++++++--------- crates/uv/tests/it/python_find.rs | 82 ++++++++++++++-------------- crates/uv/tests/it/python_install.rs | 12 ++-- crates/uv/tests/it/python_list.rs | 4 +- crates/uv/tests/it/run.rs | 40 +++++++------- crates/uv/tests/it/sync.rs | 28 ++++------ crates/uv/tests/it/tool_list.rs | 6 +- 8 files changed, 121 insertions(+), 116 deletions(-) diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index 2dc72fa1d..d4a73f953 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -210,12 +210,14 @@ impl TestContext { pub fn with_filtered_python_names(mut self) -> Self { if cfg!(windows) { self.filters - .push((r"python\.exe".to_string(), "python".to_string())); + .push((r"python\.exe".to_string(), "[PYTHON]".to_string())); } else { self.filters - .push((r"python\d.\d\d".to_string(), "python".to_string())); + 
.push((r"python\d.\d\d".to_string(), "[PYTHON]".to_string())); self.filters - .push((r"python\d".to_string(), "python".to_string())); + .push((r"python\d".to_string(), "[PYTHON]".to_string())); + self.filters + .push((r"/python".to_string(), "/[PYTHON]".to_string())); } self } @@ -224,6 +226,13 @@ impl TestContext { /// `Scripts` on Windows and `bin` on Unix. #[must_use] pub fn with_filtered_virtualenv_bin(mut self) -> Self { + self.filters.push(( + format!( + r"[\\/]{}[\\/]", + venv_bin_path(PathBuf::new()).to_string_lossy() + ), + "/[BIN]/".to_string(), + )); self.filters.push(( format!(r"[\\/]{}", venv_bin_path(PathBuf::new()).to_string_lossy()), "/[BIN]".to_string(), diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs index 43cbc26c7..537c5dff2 100644 --- a/crates/uv/tests/it/pip_sync.rs +++ b/crates/uv/tests/it/pip_sync.rs @@ -43,15 +43,15 @@ fn missing_venv() -> Result<()> { requirements.write_str("anyio")?; fs::remove_dir_all(&context.venv)?; - uv_snapshot!(context.filters(), context.pip_sync().arg("requirements.txt"), @r###" + uv_snapshot!(context.filters(), context.pip_sync().arg("requirements.txt"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/python` - Caused by: Python interpreter not found at `[VENV]/[BIN]/python` - "###); + error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/[PYTHON]` + Caused by: Python interpreter not found at `[VENV]/[BIN]/[PYTHON]` + "); assert!(predicates::path::missing().eval(&context.venv)); @@ -5191,18 +5191,18 @@ fn target_built_distribution() -> Result<()> { uv_snapshot!(context.filters(), context.pip_sync() .arg("requirements.in") .arg("--target") - .arg("target"), @r###" + .arg("target"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] 
interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Ensure that the package is present in the target directory. assert!(context.temp_dir.child("target").child("iniconfig").is_dir()); @@ -5227,20 +5227,20 @@ fn target_built_distribution() -> Result<()> { uv_snapshot!(context.filters(), context.pip_sync() .arg("requirements.in") .arg("--target") - .arg("target"), @r###" + .arg("target"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] - iniconfig==2.0.0 + iniconfig==1.1.1 - "###); + "); // Remove it, and replace with `flask`, which includes a binary. let requirements_in = context.temp_dir.child("requirements.in"); @@ -5249,20 +5249,20 @@ fn target_built_distribution() -> Result<()> { uv_snapshot!(context.filters(), context.pip_sync() .arg("requirements.in") .arg("--target") - .arg("target"), @r###" + .arg("target"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] + flask==3.0.2 - iniconfig==1.1.1 - "###); + "); // Ensure that the binary is present in the target directory. 
assert!( context @@ -5293,18 +5293,18 @@ fn target_source_distribution() -> Result<()> { .arg("--no-binary") .arg("iniconfig") .arg("--target") - .arg("target"), @r###" + .arg("target"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Ensure that the build requirements are not present in the target directory. assert!(!context.temp_dir.child("target").child("hatchling").is_dir()); @@ -5364,18 +5364,18 @@ fn target_no_build_isolation() -> Result<()> { .arg("--no-binary") .arg("wheel") .arg("--target") - .arg("target"), @r###" + .arg("target"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] + wheel==0.43.0 - "###); + "); // Ensure that the build requirements are not present in the target directory. assert!(!context.temp_dir.child("target").child("flit_core").is_dir()); @@ -5447,18 +5447,18 @@ fn prefix() -> Result<()> { uv_snapshot!(context.filters(), context.pip_sync() .arg("requirements.in") .arg("--prefix") - .arg(prefix.path()), @r###" + .arg(prefix.path()), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Ensure that we can't import the package. 
context.assert_command("import iniconfig").failure(); @@ -5483,20 +5483,20 @@ fn prefix() -> Result<()> { uv_snapshot!(context.filters(), context.pip_sync() .arg("requirements.in") .arg("--prefix") - .arg(prefix.path()), @r###" + .arg(prefix.path()), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Using CPython 3.12.[X] interpreter at: .venv/[BIN]/python + Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON] Resolved 1 package in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] - iniconfig==2.0.0 + iniconfig==1.1.1 - "###); + "); Ok(()) } diff --git a/crates/uv/tests/it/python_find.rs b/crates/uv/tests/it/python_find.rs index b8b42d61b..49e60c068 100644 --- a/crates/uv/tests/it/python_find.rs +++ b/crates/uv/tests/it/python_find.rs @@ -425,25 +425,25 @@ fn python_find_venv() { // is super annoying and requires some changes to how we represent working directories in the // test context to resolve. #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find(), @r###" + uv_snapshot!(context.filters(), context.python_find(), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // Even if the `VIRTUAL_ENV` is not set (the test context includes this by default) #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find().env_remove(EnvVars::VIRTUAL_ENV), @r###" + uv_snapshot!(context.filters(), context.python_find().env_remove(EnvVars::VIRTUAL_ENV), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); let child_dir = context.temp_dir.child("child"); child_dir.create_dir_all().unwrap(); @@ -485,14 +485,14 @@ fn python_find_venv() { // We should find virtual environments from a child directory #[cfg(not(windows))] - uv_snapshot!(context.filters(), 
context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r###" + uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // A virtual environment in the child directory takes precedence over the parent uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11").arg("-q").current_dir(&child_dir), @r###" @@ -504,14 +504,14 @@ fn python_find_venv() { "###); #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r###" + uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r" success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/child/.venv/[BIN]/python + [TEMP_DIR]/child/.venv/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // But if we delete the parent virtual environment fs_err::remove_dir_all(context.temp_dir.child(".venv")).unwrap(); @@ -528,36 +528,36 @@ fn python_find_venv() { // Unless, it is requested by path #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find().arg("child/.venv"), @r###" + uv_snapshot!(context.filters(), context.python_find().arg("child/.venv"), @r" success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/child/.venv/[BIN]/python + [TEMP_DIR]/child/.venv/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // Or activated via `VIRTUAL_ENV` #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, child_dir.join(".venv").as_os_str()), @r###" + uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, child_dir.join(".venv").as_os_str()), @r" success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/child/.venv/[BIN]/python + [TEMP_DIR]/child/.venv/[BIN]/[PYTHON] ----- stderr ----- - "###); + 
"); // Or at the front of the PATH #[cfg(not(windows))] - uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, child_dir.join(".venv").join("bin").as_os_str()), @r###" + uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, child_dir.join(".venv").join("bin").as_os_str()), @r" success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/child/.venv/[BIN]/python + [TEMP_DIR]/child/.venv/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // This holds even if there are other directories before it in the path, as long as they do // not contain a Python executable @@ -569,14 +569,14 @@ fn python_find_venv() { ]) .unwrap(); - uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r###" + uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r" success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/child/.venv/[BIN]/python + [TEMP_DIR]/child/.venv/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); } // But, if there's an executable _before_ the virtual environment — we prefer that @@ -678,33 +678,32 @@ fn python_find_unsupported_version() { #[test] fn python_find_venv_invalid() { let context: TestContext = TestContext::new("3.12") - // Enable additional filters for Windows compatibility - .with_filtered_exe_suffix() .with_filtered_python_names() - .with_filtered_virtualenv_bin(); + .with_filtered_virtualenv_bin() + .with_filtered_exe_suffix(); // We find the virtual environment - uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r###" + uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // If the binaries are missing from a virtual environment, we fail 
fs_err::remove_dir_all(venv_bin_path(&context.venv)).unwrap(); - uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r###" + uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/python` - Caused by: Python interpreter not found at `[VENV]/[BIN]/python` - "###); + error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/[PYTHON]` + Caused by: Python interpreter not found at `[VENV]/[BIN]/[PYTHON]` + "); // Unless the virtual environment is not active uv_snapshot!(context.filters(), context.python_find(), @r###" @@ -783,9 +782,10 @@ fn python_required_python_major_minor() { #[test] fn python_find_script() { let context = TestContext::new("3.13") - .with_filtered_exe_suffix() .with_filtered_virtualenv_bin() - .with_filtered_python_names(); + .with_filtered_python_names() + .with_filtered_exe_suffix(); + let filters = context .filters() .into_iter() @@ -819,7 +819,7 @@ fn python_find_script() { success: true exit_code: 0 ----- stdout ----- - [CACHE_DIR]/environments-v2/[HASHEDNAME]/[BIN]/python + [CACHE_DIR]/environments-v2/[HASHEDNAME]/[BIN]/[PYTHON] ----- stderr ----- "); @@ -828,9 +828,9 @@ fn python_find_script() { #[test] fn python_find_script_no_environment() { let context = TestContext::new("3.13") - .with_filtered_exe_suffix() .with_filtered_virtualenv_bin() - .with_filtered_python_names(); + .with_filtered_python_names() + .with_filtered_exe_suffix(); let script = context.temp_dir.child("foo.py"); @@ -846,7 +846,7 @@ fn python_find_script_no_environment() { success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- "); @@ -881,9 +881,9 @@ fn python_find_script_python_not_found() { #[test] fn 
python_find_script_no_such_version() { let context = TestContext::new("3.13") - .with_filtered_exe_suffix() .with_filtered_virtualenv_bin() .with_filtered_python_names() + .with_filtered_exe_suffix() .with_filtered_python_sources(); let filters = context .filters() diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index 0cb952054..62b3254b8 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -1499,10 +1499,10 @@ fn python_install_patch_dylib() { fn python_install_314() { let context: TestContext = TestContext::new_with_versions(&[]) .with_filtered_python_keys() - .with_filtered_exe_suffix() .with_managed_python_dirs() + .with_filtered_python_install_bin() .with_filtered_python_names() - .with_filtered_python_install_bin(); + .with_filtered_exe_suffix(); // Install 3.14 // For now, this provides test coverage of pre-release handling @@ -1533,7 +1533,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON] ----- stderr ----- "); @@ -1543,7 +1543,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON] ----- stderr ----- "); @@ -1552,7 +1552,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON] ----- stderr ----- "); @@ -1572,7 +1572,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/[INSTALL-BIN]/[PYTHON] ----- stderr ----- "); diff --git 
a/crates/uv/tests/it/python_list.rs b/crates/uv/tests/it/python_list.rs index 959ebdd80..11472baec 100644 --- a/crates/uv/tests/it/python_list.rs +++ b/crates/uv/tests/it/python_list.rs @@ -411,8 +411,8 @@ fn python_list_downloads_installed() { let context: TestContext = TestContext::new_with_versions(&[]) .with_filtered_python_keys() - .with_filtered_python_names() .with_filtered_python_install_bin() + .with_filtered_python_names() .with_managed_python_dirs(); // We do not test showing all interpreters — as it differs per platform @@ -450,7 +450,7 @@ fn python_list_downloads_installed() { success: true exit_code: 0 ----- stdout ----- - cpython-3.10.18-[PLATFORM] managed/cpython-3.10.18-[PLATFORM]/[INSTALL-BIN]/python + cpython-3.10.18-[PLATFORM] managed/cpython-3.10.18-[PLATFORM]/[INSTALL-BIN]/[PYTHON] pypy-3.10.16-[PLATFORM] graalpy-3.10.0-[PLATFORM] diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index 98c2adbfe..93420cca0 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -2851,11 +2851,11 @@ fn run_no_project() -> Result<()> { init.touch()?; // `run` should run in the context of the project. - uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- Resolved 6 packages in [TIME] @@ -2865,50 +2865,50 @@ fn run_no_project() -> Result<()> { + foo==1.0.0 (from file://[TEMP_DIR]/) + idna==3.6 + sniffio==1.3.1 - "###); + "); // `run --no-project` should not (but it should still run in the same environment, as it would // if there were no project at all). 
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // `run --no-project --isolated` should run in an entirely isolated environment. - uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--isolated").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--isolated").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [CACHE_DIR]/builds-v0/[TMP]/python + [CACHE_DIR]/builds-v0/[TMP]/[PYTHON] ----- stderr ----- - "###); + "); // `run --no-project` should not (but it should still run in the same environment, as it would // if there were no project at all). - uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); // `run --no-project --locked` should fail. 
- uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--locked").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--locked").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- warning: `--locked` has no effect when used alongside `--no-project` - "###); + "); Ok(()) } @@ -3092,14 +3092,14 @@ fn run_project_toml_error() -> Result<()> { "###); // `run --no-project` should not - uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("python").arg("-c").arg("import sys; print(sys.executable)"), @r" success: true exit_code: 0 ----- stdout ----- - [VENV]/[BIN]/python + [VENV]/[BIN]/[PYTHON] ----- stderr ----- - "###); + "); Ok(()) } @@ -3691,7 +3691,7 @@ fn run_linked_environment_path() -> Result<()> { exit_code: 0 ----- stdout ----- [TEMP_DIR]/target - [TEMP_DIR]/target/[BIN]/python + [TEMP_DIR]/target/[BIN]/[PYTHON] ----- stderr ----- Resolved 8 packages in [TIME] @@ -3705,7 +3705,7 @@ fn run_linked_environment_path() -> Result<()> { }, { assert_snapshot!( black_entrypoint, @r##" - #![TEMP_DIR]/target/[BIN]/python + #![TEMP_DIR]/target/[BIN]/[PYTHON] # -*- coding: utf-8 -*- import sys from black import patched_main diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 7063035f9..9fecd50b0 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -306,7 +306,7 @@ fn sync_json() -> Result<()> { "environment": { "path": "[VENV]/", "python": { - "path": "[VENV]/[BIN]/python", + "path": "[VENV]/[BIN]/[PYTHON]", "version": "3.12.[X]", "implementation": "cpython" } @@ -350,7 +350,7 @@ fn sync_json() -> Result<()> { "environment": { "path": 
"[VENV]/", "python": { - "path": "[VENV]/[BIN]/python", + "path": "[VENV]/[BIN]/[PYTHON]", "version": "3.12.[X]", "implementation": "cpython" } @@ -389,7 +389,7 @@ fn sync_json() -> Result<()> { "environment": { "path": "[VENV]/", "python": { - "path": "[VENV]/[BIN]/python", + "path": "[VENV]/[BIN]/[PYTHON]", "version": "3.12.[X]", "implementation": "cpython" } @@ -475,7 +475,7 @@ fn sync_dry_json() -> Result<()> { "environment": { "path": "[VENV]/", "python": { - "path": "[VENV]/[BIN]/python", + "path": "[VENV]/[BIN]/[PYTHON]", "version": "3.12.[X]", "implementation": "cpython" } @@ -4884,14 +4884,10 @@ fn sync_active_script_environment_json() -> Result<()> { let filters = context .filters() .into_iter() - .chain(vec![ - ( - r"environments-v2/script-[a-z0-9]+", - "environments-v2/script-[HASH]", - ), - ("bin/python3", "[PYTHON]"), - ("Scripts/python.exe", "[PYTHON]"), - ]) + .chain(vec![( + r"environments-v2/script-[a-z0-9]+", + "environments-v2/script-[HASH]", + )]) .collect::>(); // Running `uv sync --script` with `VIRTUAL_ENV` should warn @@ -4914,7 +4910,7 @@ fn sync_active_script_environment_json() -> Result<()> { "environment": { "path": "[CACHE_DIR]/environments-v2/script-[HASH]", "python": { - "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/python", + "path": "[CACHE_DIR]/environments-v2/script-[HASH]/[BIN]/[PYTHON]", "version": "3.11.[X]", "implementation": "cpython" } @@ -4960,7 +4956,7 @@ fn sync_active_script_environment_json() -> Result<()> { "environment": { "path": "[TEMP_DIR]/foo", "python": { - "path": "[TEMP_DIR]/foo/[BIN]/python", + "path": "[TEMP_DIR]/foo/[BIN]/[PYTHON]", "version": "3.11.[X]", "implementation": "cpython" } @@ -5019,7 +5015,7 @@ fn sync_active_script_environment_json() -> Result<()> { "environment": { "path": "[TEMP_DIR]/foo", "python": { - "path": "[TEMP_DIR]/foo/[BIN]/python", + "path": "[TEMP_DIR]/foo/[BIN]/[PYTHON]", "version": "3.12.[X]", "implementation": "cpython" } @@ -6558,7 +6554,7 @@ fn 
sync_invalid_environment() -> Result<()> { ----- stdout ----- ----- stderr ----- - warning: Ignoring existing virtual environment linked to non-existent Python interpreter: .venv/[BIN]/python -> python + warning: Ignoring existing virtual environment linked to non-existent Python interpreter: .venv/[BIN]/[PYTHON] -> python Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Removed virtual environment at: .venv Creating virtual environment at: .venv diff --git a/crates/uv/tests/it/tool_list.rs b/crates/uv/tests/it/tool_list.rs index 93dd5756e..9268118ca 100644 --- a/crates/uv/tests/it/tool_list.rs +++ b/crates/uv/tests/it/tool_list.rs @@ -180,7 +180,7 @@ fn tool_list_bad_environment() -> Result<()> { .tool_list() .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), - @r###" + @r" success: true exit_code: 0 ----- stdout ----- @@ -188,8 +188,8 @@ fn tool_list_bad_environment() -> Result<()> { - ruff ----- stderr ----- - warning: Invalid environment at `tools/black`: missing Python executable at `tools/black/[BIN]/python` (run `uv tool install black --reinstall` to reinstall) - "### + warning: Invalid environment at `tools/black`: missing Python executable at `tools/black/[BIN]/[PYTHON]` (run `uv tool install black --reinstall` to reinstall) + " ); Ok(()) From eaff96e5dce946ec91ee632025f4dbd8ae67f173 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 16 Jul 2025 09:06:06 -0500 Subject: [PATCH 046/130] Sync latest Python releases (#14643) Automated update for Python releases. 
Co-authored-by: zanieb <2586601+zanieb@users.noreply.github.com> --- crates/uv-python/download-metadata.json | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json index 8c7ffec4c..540a3c8a0 100644 --- a/crates/uv-python/download-metadata.json +++ b/crates/uv-python/download-metadata.json @@ -35771,8 +35771,8 @@ "minor": 11, "patch": 0, "prerelease": "", - "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-macos-aarch64.tar.gz", - "sha256": "61e11d5176d5bb709b919979ef3525f4db1e39c404b59aa54d887f56bf8fab44", + "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-macos-aarch64.tar.gz", + "sha256": "f4a2ae01bae0fa53ec0d19f86d73c6dcc2a162d245552030183b84bfdd8f7635", "variant": null }, "graalpy-3.11.0-darwin-x86_64-none": { @@ -35787,8 +35787,8 @@ "minor": 11, "patch": 0, "prerelease": "", - "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-macos-amd64.tar.gz", - "sha256": "4bc42b36117c9ab09c4f411ec5a7a85ed58521dd20b529d971bb0ed3d0b7c363", + "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-macos-amd64.tar.gz", + "sha256": "2f4d5e7dbdf90e38778dfcb8ca3e1ec7eee257ef726b1937d5bc91b54cdddf9b", "variant": null }, "graalpy-3.11.0-linux-aarch64-gnu": { @@ -35803,8 +35803,8 @@ "minor": 11, "patch": 0, "prerelease": "", - "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-linux-aarch64.tar.gz", - "sha256": "2a80800a76ee6b737d6458ba9ab30ce386dfdd5b2b2bec3ee6bc51fd8e51e7c2", + "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-linux-aarch64.tar.gz", + "sha256": "c9be459ab9479892b88dd63f8f88cbc7b1067f4cb27ff17f4761b36de6bd73af", "variant": null }, "graalpy-3.11.0-linux-x86_64-gnu": { @@ -35819,8 +35819,8 @@ "minor": 11, 
"patch": 0, "prerelease": "", - "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-linux-amd64.tar.gz", - "sha256": "55872af24819cb99efa2338db057aeda0c8f9dd412a4a6f5ea19b256ee82fd9e", + "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-linux-amd64.tar.gz", + "sha256": "604b7abf6c58038a30866e52da43818af63bcd97909af8b1a96523c7f0e01414", "variant": null }, "graalpy-3.11.0-windows-x86_64-none": { @@ -35835,8 +35835,8 @@ "minor": 11, "patch": 0, "prerelease": "", - "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.1/graalpy-24.2.1-windows-amd64.zip", - "sha256": "bad923fb64fa2fc71bb424818aac8dcfe0cc9554abef5235d7c08e597ed778ae", + "url": "https://github.com/oracle/graalpython/releases/download/graal-24.2.2/graalpy-24.2.2-windows-amd64.zip", + "sha256": "9606134284d4d95b2f9d69c3087cd3e9e488f46355b419f5e66588a3281df6a3", "variant": null }, "graalpy-3.10.0-darwin-aarch64-none": { From 1f49fbd53cdef47582cfdd45f31f077fbf75210d Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 09:17:01 -0500 Subject: [PATCH 047/130] Display `sys.executable` names in check system jobs (#14656) Cherry-picked from https://github.com/astral-sh/uv/pull/14652 This is useful for debugging --- scripts/check_system_python.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/check_system_python.py b/scripts/check_system_python.py index 565518e50..fbfc5557e 100755 --- a/scripts/check_system_python.py +++ b/scripts/check_system_python.py @@ -24,7 +24,7 @@ def install_package(*, uv: str, package: str): check=True, ) - logging.info(f"Checking that `{package}` can be imported.") + logging.info(f"Checking that `{package}` can be imported with `{sys.executable}`.") code = subprocess.run( [sys.executable, "-c", f"import {package}"], cwd=temp_dir, @@ -82,7 +82,9 @@ if __name__ == "__main__": ) # Ensure that the package (`pylint`) is installed. 
- logging.info("Checking that `pylint` is installed.") + logging.info( + f"Checking that `pylint` is installed with `{sys.executable} -m pip`." + ) code = subprocess.run( [sys.executable, "-m", "pip", "show", "pylint"], cwd=temp_dir, From 8b29ec0bfd4d34141bfcfb8f5424cbc737dba0b8 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 09:20:25 -0500 Subject: [PATCH 048/130] Use `astral.sh` instead of `example.com` in `lock_unique_named_index` (#14657) This test flakes a lot, maybe using a different domain will help Closes https://github.com/astral-sh/uv/issues/14542 --- crates/uv/tests/it/lock.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index d5757b6ef..f91870762 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -16412,7 +16412,7 @@ fn lock_unique_named_index() -> Result<()> { [[tool.uv.index]] name = "example" - url = "https://example.com" + url = "https://astral.sh" "#, )?; From 7fece9b90a07ab8479ba9445f90cedc3b08f61a8 Mon Sep 17 00:00:00 2001 From: konsti Date: Wed, 16 Jul 2025 15:21:22 +0100 Subject: [PATCH 049/130] Remove marker from `Edge` (#14649) It seems that this field is unused. --- crates/uv-distribution-types/src/dist_error.rs | 4 ++-- crates/uv-distribution-types/src/resolution.rs | 9 ++++----- crates/uv-resolver/src/lock/export/pylock_toml.rs | 2 +- crates/uv-resolver/src/lock/installable.rs | 13 ++++++------- crates/uv-resolver/src/resolution/output.rs | 13 ++++--------- 5 files changed, 17 insertions(+), 24 deletions(-) diff --git a/crates/uv-distribution-types/src/dist_error.rs b/crates/uv-distribution-types/src/dist_error.rs index a452ce663..d2cfee16d 100644 --- a/crates/uv-distribution-types/src/dist_error.rs +++ b/crates/uv-distribution-types/src/dist_error.rs @@ -131,11 +131,11 @@ impl DerivationChain { )); let target = edge.source(); let extra = match edge.weight() { - Edge::Optional(extra, ..) 
=> Some(extra.clone()), + Edge::Optional(extra) => Some(extra.clone()), _ => None, }; let group = match edge.weight() { - Edge::Dev(group, ..) => Some(group.clone()), + Edge::Dev(group) => Some(group.clone()), _ => None, }; queue.push_back((target, extra, group, path)); diff --git a/crates/uv-distribution-types/src/resolution.rs b/crates/uv-distribution-types/src/resolution.rs index 5ff34adf5..e690b8693 100644 --- a/crates/uv-distribution-types/src/resolution.rs +++ b/crates/uv-distribution-types/src/resolution.rs @@ -1,6 +1,5 @@ use uv_distribution_filename::DistExtension; use uv_normalize::{ExtraName, GroupName, PackageName}; -use uv_pep508::MarkerTree; use uv_pypi_types::{HashDigest, HashDigests}; use crate::{ @@ -202,12 +201,12 @@ impl Node { } } -/// An edge in the resolution graph, along with the marker that must be satisfied to traverse it. +/// An edge in the resolution graph. #[derive(Debug, Clone)] pub enum Edge { - Prod(MarkerTree), - Optional(ExtraName, MarkerTree), - Dev(GroupName, MarkerTree), + Prod, + Optional(ExtraName), + Dev(GroupName), } impl From<&ResolvedDist> for RequirementSource { diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index d2c2383a5..8a53fd8f7 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -1152,7 +1152,7 @@ impl<'lock> PylockToml { }; let index = graph.add_node(dist); - graph.add_edge(root, index, Edge::Prod(package.marker)); + graph.add_edge(root, index, Edge::Prod); } Ok(Resolution::new(graph)) diff --git a/crates/uv-resolver/src/lock/installable.rs b/crates/uv-resolver/src/lock/installable.rs index e3cdbf019..4851306da 100644 --- a/crates/uv-resolver/src/lock/installable.rs +++ b/crates/uv-resolver/src/lock/installable.rs @@ -13,7 +13,6 @@ use uv_configuration::ExtrasSpecificationWithDefaults; use uv_configuration::{BuildOptions, DependencyGroupsWithDefaults, InstallOptions}; use 
uv_distribution_types::{Edge, Node, Resolution, ResolvedDist}; use uv_normalize::{ExtraName, GroupName, PackageName}; -use uv_pep508::MarkerTree; use uv_platform_tags::Tags; use uv_pypi_types::ResolverMarkerEnvironment; @@ -113,7 +112,7 @@ pub trait Installable<'lock> { inverse.insert(&dist.id, index); // Add an edge from the root. - petgraph.add_edge(root, index, Edge::Prod(MarkerTree::TRUE)); + petgraph.add_edge(root, index, Edge::Prod); // Push the package onto the queue. roots.push((dist, index)); @@ -189,7 +188,7 @@ pub trait Installable<'lock> { // a specific marker environment and set of extras/groups. // So at this point, we know the extras/groups have been // satisfied, so we can safely drop the conflict marker. - Edge::Dev(group.clone(), dep.complexified_marker.pep508()), + Edge::Dev(group.clone()), ); // Push its dependencies on the queue. @@ -231,7 +230,7 @@ pub trait Installable<'lock> { inverse.insert(&dist.id, index); // Add the edge. - petgraph.add_edge(root, index, Edge::Prod(dependency.marker)); + petgraph.add_edge(root, index, Edge::Prod); // Push its dependencies on the queue. if seen.insert((&dist.id, None)) { @@ -300,7 +299,7 @@ pub trait Installable<'lock> { }; // Add the edge. - petgraph.add_edge(root, index, Edge::Dev(group.clone(), dependency.marker)); + petgraph.add_edge(root, index, Edge::Dev(group.clone())); // Push its dependencies on the queue. 
if seen.insert((&dist.id, None)) { @@ -484,9 +483,9 @@ pub trait Installable<'lock> { index, dep_index, if let Some(extra) = extra { - Edge::Optional(extra.clone(), dep.complexified_marker.pep508()) + Edge::Optional(extra.clone()) } else { - Edge::Prod(dep.complexified_marker.pep508()) + Edge::Prod }, ); diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs index 928b9c605..dd2b3388f 100644 --- a/crates/uv-resolver/src/resolution/output.rs +++ b/crates/uv-resolver/src/resolution/output.rs @@ -894,16 +894,11 @@ impl From for uv_distribution_types::Resolution { // Re-add the edges to the reduced graph. for edge in graph.edge_indices() { let (source, target) = graph.edge_endpoints(edge).unwrap(); - // OK to ignore conflicting marker because we've asserted - // above that we aren't in universal mode. If we aren't in - // universal mode, then there can be no conflicts since - // conflicts imply forks and forks imply universal mode. - let marker = graph[edge].pep508(); match (&graph[source], &graph[target]) { (ResolutionGraphNode::Root, ResolutionGraphNode::Dist(target_dist)) => { let target = inverse[&target_dist.name()]; - transformed.update_edge(root, target, Edge::Prod(marker)); + transformed.update_edge(root, target, Edge::Prod); } ( ResolutionGraphNode::Dist(source_dist), @@ -913,11 +908,11 @@ impl From for uv_distribution_types::Resolution { let target = inverse[&target_dist.name()]; let edge = if let Some(extra) = source_dist.extra.as_ref() { - Edge::Optional(extra.clone(), marker) + Edge::Optional(extra.clone()) } else if let Some(dev) = source_dist.dev.as_ref() { - Edge::Dev(dev.clone(), marker) + Edge::Dev(dev.clone()) } else { - Edge::Prod(marker) + Edge::Prod }; transformed.add_edge(source, target, edge); From 052a74c45110d5ddad9943ff33c2c3c332b96e0f Mon Sep 17 00:00:00 2001 From: konsti Date: Wed, 16 Jul 2025 15:56:32 +0100 Subject: [PATCH 050/130] Fix doctests (#14658) `cargo nextest run` doesn't run them, 
but `cargo insta test --test-runner nextest` does, which surfaced those failures. --- crates/uv-pep508/src/lib.rs | 2 +- crates/uv-requirements-txt/src/shquote.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/uv-pep508/src/lib.rs b/crates/uv-pep508/src/lib.rs index e2945743b..f63d46206 100644 --- a/crates/uv-pep508/src/lib.rs +++ b/crates/uv-pep508/src/lib.rs @@ -11,7 +11,7 @@ //! let marker = r#"requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8""#; //! let dependency_specification = Requirement::::from_str(marker).unwrap(); //! assert_eq!(dependency_specification.name.as_ref(), "requests"); -//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()]); +//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()].into()); //! ``` #![warn(missing_docs)] diff --git a/crates/uv-requirements-txt/src/shquote.rs b/crates/uv-requirements-txt/src/shquote.rs index d30b4bc5b..180a62496 100644 --- a/crates/uv-requirements-txt/src/shquote.rs +++ b/crates/uv-requirements-txt/src/shquote.rs @@ -146,8 +146,8 @@ fn unquote_open_escape(acc: &mut String, cursor: &mut std::iter::Enumerate Result, UnquoteError> { // If the string does not contain any single-quotes, double-quotes, or escape sequences, it From 7cdc1f62ee9e1dbf6b9cbb9967c3e7e75813bd99 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Wed, 16 Jul 2025 12:02:29 -0400 Subject: [PATCH 051/130] Suggest `uv cache clean` prior to `--reinstall` (#14659) ## Summary Closes https://github.com/astral-sh/uv/issues/14479. 
--- docs/concepts/cache.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/concepts/cache.md b/docs/concepts/cache.md index 6610ccb55..189465ac4 100644 --- a/docs/concepts/cache.md +++ b/docs/concepts/cache.md @@ -19,12 +19,17 @@ The specifics of uv's caching semantics vary based on the nature of the dependen If you're running into caching issues, uv includes a few escape hatches: +- To clear the cache entirely, run `uv cache clean`. To clear the cache for a specific package, run + `uv cache clean `. For example, `uv cache clean ruff` will clear the cache for the + `ruff` package. - To force uv to revalidate cached data for all dependencies, pass `--refresh` to any command (e.g., `uv sync --refresh` or `uv pip install --refresh ...`). - To force uv to revalidate cached data for a specific dependency pass `--refresh-package` to any - command (e.g., `uv sync --refresh-package flask` or `uv pip install --refresh-package flask ...`). + command (e.g., `uv sync --refresh-package ruff` or `uv pip install --refresh-package ruff ...`). - To force uv to ignore existing installed versions, pass `--reinstall` to any installation command - (e.g., `uv sync --reinstall` or `uv pip install --reinstall ...`). + (e.g., `uv sync --reinstall` or `uv pip install --reinstall ...`). (Consider running + `uv cache clean ` first, to ensure that the cache is cleared prior to + reinstallation.) As a special case, uv will always rebuild and reinstall any local directory dependencies passed explicitly on the command-line (e.g., `uv pip install .`). From a8bb7be52b15ac0b8bced1d58b4044191491ecce Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Wed, 16 Jul 2025 21:39:21 -0400 Subject: [PATCH 052/130] windows_exception: Improve async signal safety (#14619) It's not as bad as I feared to bypass libsys's stderr. (There's still a lock in libsys's backtrace, which might also not be too bad to bypass.) 
--- Cargo.lock | 2 + Cargo.toml | 3 +- crates/uv/Cargo.toml | 2 + crates/uv/src/windows_exception.rs | 299 +++++++++++++++++++++++------ 4 files changed, 247 insertions(+), 59 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c43f4872d..3ff7ad6d0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4637,6 +4637,7 @@ version = "0.7.21" dependencies = [ "anstream", "anyhow", + "arrayvec", "assert_cmd", "assert_fs", "axoupdater", @@ -4735,6 +4736,7 @@ dependencies = [ "which", "whoami", "windows 0.59.0", + "windows-result 0.3.4", "wiremock", "zip", ] diff --git a/Cargo.toml b/Cargo.toml index 752955223..2c32ce8d0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -75,6 +75,7 @@ uv-workspace = { path = "crates/uv-workspace" } anstream = { version = "0.6.15" } anyhow = { version = "1.0.89" } arcstr = { version = "1.2.0" } +arrayvec = { version = "0.7.6" } astral-tokio-tar = { version = "0.5.1" } async-channel = { version = "2.3.1" } async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] } @@ -184,7 +185,7 @@ url = { version = "2.5.2", features = ["serde"] } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" } walkdir = { version = "2.5.0" } which = { version = "8.0.0", features = ["regex"] } -windows = { version = "0.59.0", features = ["Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] } +windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] } windows-core = { version = "0.59.0" } windows-registry = { version = "0.5.0" } windows-result = { version = "0.3.0" } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index fe2f2200c..d72035467 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -107,8 +107,10 @@ which = { workspace = true } zip = { workspace = true } [target.'cfg(target_os = "windows")'.dependencies] 
+arrayvec = { workspace = true } self-replace = { workspace = true } windows = { workspace = true } +windows-result = { workspace = true } [dev-dependencies] assert_cmd = { version = "2.0.16" } diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs index e96075f96..2e40e89cc 100644 --- a/crates/uv/src/windows_exception.rs +++ b/crates/uv/src/windows_exception.rs @@ -9,121 +9,304 @@ //! implementation and also displays some minimal information from the exception itself. #![allow(unsafe_code)] -#![allow(clippy::print_stderr)] +// Usually we want fs_err over std::fs, but there's no advantage here, we don't +// report errors encountered while reporting an exception. +#![allow(clippy::disallowed_types)] +use std::fmt::Write; +use std::fs::File; +use std::mem::ManuallyDrop; +use std::os::windows::io::FromRawHandle; + +use arrayvec::ArrayVec; use windows::Win32::{ Foundation, + Globalization::CP_UTF8, + System::Console::{ + CONSOLE_MODE, GetConsoleMode, GetConsoleOutputCP, GetStdHandle, STD_ERROR_HANDLE, + WriteConsoleW, + }, System::Diagnostics::Debug::{ CONTEXT, EXCEPTION_CONTINUE_SEARCH, EXCEPTION_POINTERS, SetUnhandledExceptionFilter, }, }; -fn display_exception_info(name: &str, info: &[usize; 15]) { - match info[0] { - 0 => eprintln!("{name} reading {:#x}", info[1]), - 1 => eprintln!("{name} writing {:#x}", info[1]), - 8 => eprintln!("{name} executing {:#x}", info[1]), - _ => eprintln!("{name} from operation {} at {:#x}", info[0], info[1]), +/// A write target for standard error that can be safely used in an exception handler. +/// +/// The exception handler can be called at any point in the execution of machine code, perhaps +/// halfway through a Rust operation. It needs to be robust to operating with unknown program +/// state, a concept that the UNIX world calls "async signal safety." 
In particular, we can't +/// write to `std::io::stderr()` because that takes a lock, and we could be called in the middle of +/// code that is holding that lock. +enum ExceptionSafeStderr { + // This is a simplified version of the logic in Rust std::sys::stdio::windows, on the + // assumption that we're only writing strs, not bytes (so we do not need to care about + // incomplete or invalid UTF-8) and we don't care about Windows 7 or every drop of + // performance. + // - If stderr is a non-UTF-8 console, we need to write UTF-16 with WriteConsoleW, and we + // convert with encode_utf16(). + // - If stderr is not a console, we cannot use WriteConsole and must use NtWriteFile, which + // takes (UTF-8) bytes. + // - If stderr is a UTF-8 console, we can do either. std uses NtWriteFile. + // Note that we do not want to close stderr at any point, hence ManuallyDrop. + WriteConsole(Foundation::HANDLE), + NtWriteFile(ManuallyDrop), +} + +impl ExceptionSafeStderr { + fn new() -> Result { + // SAFETY: winapi call, no interesting parameters + let handle = unsafe { GetStdHandle(STD_ERROR_HANDLE) }?; + if handle.is_invalid() { + return Err(windows_result::Error::empty()); + } + let mut mode = CONSOLE_MODE::default(); + // SAFETY: winapi calls, no interesting parameters + if unsafe { + GetConsoleMode(handle, &raw mut mode).is_ok() && GetConsoleOutputCP() != CP_UTF8 + } { + Ok(Self::WriteConsole(handle)) + } else { + // SAFETY: winapi call, we just got this handle from the OS and checked it + let file = unsafe { File::from_raw_handle(handle.0) }; + Ok(Self::NtWriteFile(ManuallyDrop::new(file))) + } + } + + fn write_winerror(&mut self, s: &str) -> Result<(), windows_result::Error> { + match self { + Self::WriteConsole(handle) => { + // According to comments in the ReactOS source, NT's behavior is that writes of 80 + // bytes or fewer are passed in-line in the message to the console server and + // longer writes allocate out of a shared heap with CSRSS. 
In an attempt to avoid + // allocations, write in 80-byte chunks. + let mut buf = ArrayVec::::new(); + for c in s.encode_utf16() { + if buf.try_push(c).is_err() { + // SAFETY: winapi call, arrayvec guarantees the slice is valid + unsafe { WriteConsoleW(*handle, &buf, None, None) }?; + buf.clear(); + buf.push(c); + } + } + if !buf.is_empty() { + // SAFETY: winapi call, arrayvec guarantees the slice is valid + unsafe { WriteConsoleW(*handle, &buf, None, None) }?; + } + } + Self::NtWriteFile(file) => { + use std::io::Write; + file.write_all(s.as_bytes())?; + } + } + Ok(()) } } +impl Write for ExceptionSafeStderr { + fn write_str(&mut self, s: &str) -> std::fmt::Result { + self.write_winerror(s).map_err(|_| std::fmt::Error) + } +} + +fn display_exception_info( + e: &mut ExceptionSafeStderr, + name: &str, + info: &[usize; 15], +) -> std::fmt::Result { + match info[0] { + 0 => writeln!(e, "{name} reading {:#x}", info[1])?, + 1 => writeln!(e, "{name} writing {:#x}", info[1])?, + 8 => writeln!(e, "{name} executing {:#x}", info[1])?, + _ => writeln!(e, "{name} from operation {} at {:#x}", info[0], info[1])?, + } + Ok(()) +} + #[cfg(target_arch = "x86")] -fn dump_regs(c: &CONTEXT) { - eprintln!( - "eax={:08x} ebx={:08x} ecx={:08x} edx={:08x} esi={:08x} edi={:08x}", - c.Eax, c.Ebx, c.Ecx, c.Edx, c.Esi, c.Edi - ); - eprintln!( - "eip={:08x} ebp={:08x} esp={:08x} eflags={:08x}", - c.Eip, c.Ebp, c.Esp, c.EFlags - ); +fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result { + let CONTEXT { + Eax, + Ebx, + Ecx, + Edx, + Esi, + Edi, + Eip, + Ebp, + Esp, + EFlags, + .. 
+ } = c; + writeln!( + e, + "eax={Eax:08x} ebx={Ebx:08x} ecx={Ecx:08x} edx={Edx:08x} esi={Esi:08x} edi={Edi:08x}" + )?; + writeln!( + e, + "eip={Eip:08x} ebp={Ebp:08x} esp={Esp:08x} eflags={EFlags:08x}" + )?; + Ok(()) } #[cfg(target_arch = "x86_64")] -fn dump_regs(c: &CONTEXT) { - eprintln!("rax={:016x} rbx={:016x} rcx={:016x}", c.Rax, c.Rbx, c.Rcx); - eprintln!("rdx={:016x} rsx={:016x} rdi={:016x}", c.Rdx, c.Rsi, c.Rdi); - eprintln!("rsp={:016x} rbp={:016x} r8={:016x}", c.Rsp, c.Rbp, c.R8); - eprintln!(" r9={:016x} r10={:016x} r11={:016x}", c.R9, c.R10, c.R11); - eprintln!("r12={:016x} r13={:016x} r14={:016x}", c.R12, c.R13, c.R14); - eprintln!( - "r15={:016x} rip={:016x} eflags={:016x}", - c.R15, c.Rip, c.EFlags - ); +fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result { + let CONTEXT { + Rax, + Rbx, + Rcx, + Rdx, + Rsi, + Rdi, + Rsp, + Rbp, + R8, + R9, + R10, + R11, + R12, + R13, + R14, + R15, + Rip, + EFlags, + .. + } = c; + writeln!(e, "rax={Rax:016x} rbx={Rbx:016x} rcx={Rcx:016x}")?; + writeln!(e, "rdx={Rdx:016x} rsi={Rsi:016x} rdi={Rdi:016x}")?; + writeln!(e, "rsp={Rsp:016x} rbp={Rbp:016x} r8={R8 :016x}")?; + writeln!(e, " r9={R9 :016x} r10={R10:016x} r11={R11:016x}")?; + writeln!(e, "r12={R12:016x} r13={R13:016x} r14={R14:016x}")?; + writeln!(e, "r15={R15:016x} rip={Rip:016x} eflags={EFlags:016x}")?; + Ok(()) } #[cfg(target_arch = "aarch64")] -fn dump_regs(c: &CONTEXT) { +fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result { + let CONTEXT { Cpsr, Sp, Pc, .. } = c; // SAFETY: The two variants of this anonymous union are equivalent, // one's an array and one has named registers. 
- let r = unsafe { c.Anonymous.Anonymous }; - eprintln!("cpsr={:016x} sp={:016x} pc={:016x}", c.Cpsr, c.Sp, c.Pc); - eprintln!(" x0={:016x} x1={:016x} x2={:016x}", r.X0, r.X1, r.X2); - eprintln!(" x3={:016x} x4={:016x} x5={:016x}", r.X3, r.X4, r.X5); - eprintln!(" x6={:016x} x7={:016x} x8={:016x}", r.X6, r.X7, r.X8); - eprintln!(" x9={:016x} x10={:016x} x11={:016x}", r.X9, r.X10, r.X11); - eprintln!(" x12={:016x} x13={:016x} x14={:016x}", r.X12, r.X13, r.X14); - eprintln!(" x15={:016x} x16={:016x} x17={:016x}", r.X15, r.X16, r.X17); - eprintln!(" x18={:016x} x19={:016x} x20={:016x}", r.X18, r.X19, r.X20); - eprintln!(" x21={:016x} x22={:016x} x23={:016x}", r.X21, r.X22, r.X23); - eprintln!(" x24={:016x} x25={:016x} x26={:016x}", r.X24, r.X25, r.X26); - eprintln!(" x27={:016x} x28={:016x}", r.X27, r.X28); - eprintln!(" fp={:016x} lr={:016x}", r.Fp, r.Lr); + let regs = unsafe { c.Anonymous.Anonymous }; + let Windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 { + X0, + X1, + X2, + X3, + X4, + X5, + X6, + X7, + X8, + X9, + X10, + X11, + X12, + X13, + X14, + X15, + X16, + X17, + X18, + X19, + X20, + X21, + X22, + X23, + X24, + X25, + X26, + X27, + X28, + Fp, + Lr, + } = regs; + writeln!(e, "cpsr={Cpsr:016x} sp={Sp :016x} pc={Pc :016x}")?; + writeln!(e, " x0={X0 :016x} x1={X1 :016x} x2={X2 :016x}")?; + writeln!(e, " x3={X3 :016x} x4={X4 :016x} x5={X5 :016x}")?; + writeln!(e, " x6={X6 :016x} x7={X7 :016x} x8={X8 :016x}")?; + writeln!(e, " x9={X9 :016x} x10={X10:016x} x11={X11:016x}")?; + writeln!(e, " x12={X12 :016x} x13={X13:016x} x14={X14:016x}")?; + writeln!(e, " x15={X15 :016x} x16={X16:016x} x17={X17:016x}")?; + writeln!(e, " x18={X18 :016x} x19={X19:016x} x20={X20:016x}")?; + writeln!(e, " x21={X21 :016x} x22={X22:016x} x23={X23:016x}")?; + writeln!(e, " x24={X24 :016x} x25={X25:016x} x26={X26:016x}")?; + writeln!(e, " x27={X27 :016x} x28={X28:016x}")?; + writeln!(e, " fp={Fp :016x} lr={Lr :016x}")?; + Ok(()) } -unsafe extern "system" fn 
unhandled_exception_filter( - exception_info: *const EXCEPTION_POINTERS, -) -> i32 { - // TODO: Really we should not be using eprintln here because Stderr is not async-signal-safe. - // Probably we should be calling the console APIs directly. - eprintln!("error: unhandled exception in uv, please report a bug:"); +fn dump_exception(exception_info: *const EXCEPTION_POINTERS) -> std::fmt::Result { + let mut e = ExceptionSafeStderr::new().map_err(|_| std::fmt::Error)?; + writeln!(e, "error: unhandled exception in uv, please report a bug:")?; let mut context = None; // SAFETY: Pointer comes from the OS if let Some(info) = unsafe { exception_info.as_ref() } { // SAFETY: Pointer comes from the OS if let Some(exc) = unsafe { info.ExceptionRecord.as_ref() } { - eprintln!( + writeln!( + e, "code {:#X} at address {:?}", exc.ExceptionCode.0, exc.ExceptionAddress - ); + )?; match exc.ExceptionCode { Foundation::EXCEPTION_ACCESS_VIOLATION => { - display_exception_info("EXCEPTION_ACCESS_VIOLATION", &exc.ExceptionInformation); + display_exception_info( + &mut e, + "EXCEPTION_ACCESS_VIOLATION", + &exc.ExceptionInformation, + )?; } Foundation::EXCEPTION_IN_PAGE_ERROR => { - display_exception_info("EXCEPTION_IN_PAGE_ERROR", &exc.ExceptionInformation); + display_exception_info( + &mut e, + "EXCEPTION_IN_PAGE_ERROR", + &exc.ExceptionInformation, + )?; } Foundation::EXCEPTION_ILLEGAL_INSTRUCTION => { - eprintln!("EXCEPTION_ILLEGAL_INSTRUCTION"); + writeln!(e, "EXCEPTION_ILLEGAL_INSTRUCTION")?; } Foundation::EXCEPTION_STACK_OVERFLOW => { - eprintln!("EXCEPTION_STACK_OVERFLOW"); + writeln!(e, "EXCEPTION_STACK_OVERFLOW")?; } _ => {} } } else { - eprintln!("(ExceptionRecord is NULL)"); + writeln!(e, "(ExceptionRecord is NULL)")?; } // SAFETY: Pointer comes from the OS context = unsafe { info.ContextRecord.as_ref() }; } else { - eprintln!("(ExceptionInfo is NULL)"); + writeln!(e, "(ExceptionInfo is NULL)")?; } + // TODO: std::backtrace does a lot of allocations, so we are no longer 
async-signal-safe at + // this point, but hopefully we got a useful error message on screen already. We could do a + // better job by using backtrace-rs directly + arrayvec. let backtrace = std::backtrace::Backtrace::capture(); if backtrace.status() == std::backtrace::BacktraceStatus::Disabled { - eprintln!("note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace"); + writeln!( + e, + "note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace" + )?; } else { if let Some(context) = context { - dump_regs(context); + dump_regs(&mut e, context)?; } - eprintln!("stack backtrace:\n{backtrace:#}"); + writeln!(e, "stack backtrace:\n{backtrace:#}")?; } + Ok(()) +} + +unsafe extern "system" fn unhandled_exception_filter( + exception_info: *const EXCEPTION_POINTERS, +) -> i32 { + let _ = dump_exception(exception_info); EXCEPTION_CONTINUE_SEARCH } /// Set up our handler for unhandled exceptions. pub(crate) fn setup() { - // SAFETY: winapi call + // SAFETY: winapi call, argument is a mostly async-signal-safe function unsafe { SetUnhandledExceptionFilter(Some(Some(unhandled_exception_filter))); } From b3df1c2401e71eea05f2dba3ff39d48a1a5d4ded Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Thu, 17 Jul 2025 08:29:41 -0400 Subject: [PATCH 053/130] Fix typo in #14619 (#14677) --- crates/uv/src/windows_exception.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/uv/src/windows_exception.rs b/crates/uv/src/windows_exception.rs index 2e40e89cc..048eaa1ba 100644 --- a/crates/uv/src/windows_exception.rs +++ b/crates/uv/src/windows_exception.rs @@ -187,7 +187,7 @@ fn dump_regs(e: &mut ExceptionSafeStderr, c: &CONTEXT) -> std::fmt::Result { // SAFETY: The two variants of this anonymous union are equivalent, // one's an array and one has named registers. 
let regs = unsafe { c.Anonymous.Anonymous }; - let Windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 { + let windows::Win32::System::Diagnostics::Debug::CONTEXT_0_0 { X0, X1, X2, From 09fc943cca0789e83c6afa16fc17d1d6f54b7978 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 07:38:12 -0500 Subject: [PATCH 054/130] Rename msrv build job for consistency with other binary builds (#14679) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bb357f4a3..e9beddcc5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -654,8 +654,8 @@ jobs: ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe retention-days: 1 - cargo-build-msrv: - name: "cargo build (msrv)" + build-binary-msrv: + name: "build binary | msrv" needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} runs-on: github-ubuntu-24.04-x86_64-8 From bdb8c2646a0f4ce64574b048e9b60246193a9ffc Mon Sep 17 00:00:00 2001 From: adisbladis Date: Fri, 18 Jul 2025 01:11:32 +1200 Subject: [PATCH 055/130] Add UV_COMPILE_BYTECODE_TIMEOUT environment variable (#14369) ## Summary When installing packages on _very_ slow/overloaded systems it'spossible to trigger bytecode compilation timeouts, which tends to happen in environments such as Qemu (especially without KVM/virtio), but also on systems that are simply overloaded. I've seen this in my Nix builds if I for example am compiling a Linux kernel at the same time as a few other concurrent builds. By making the bytecode compilation timeout adjustable you can work around such issues. I plan to set `UV_COMPILE_BYTECODE_TIMEOUT=0` in the [pyproject.nix builders](https://pyproject-nix.github.io/pyproject.nix/build.html) to make them more reliable. 
- Related issues * https://github.com/astral-sh/uv/issues/6105 ## Test Plan Only manual testing was applied in this instance. There is no existing automated tests for bytecode compilation timeout afaict. --- crates/uv-installer/src/compile.rs | 61 ++++++++++++++++++++++++------ crates/uv-static/src/env_vars.rs | 3 ++ docs/reference/environment.md | 4 ++ 3 files changed, 56 insertions(+), 12 deletions(-) diff --git a/crates/uv-installer/src/compile.rs b/crates/uv-installer/src/compile.rs index 7dd167b4a..4ee74f40d 100644 --- a/crates/uv-installer/src/compile.rs +++ b/crates/uv-installer/src/compile.rs @@ -2,7 +2,7 @@ use std::panic::AssertUnwindSafe; use std::path::{Path, PathBuf}; use std::process::Stdio; use std::time::Duration; -use std::{io, panic}; +use std::{env, io, panic}; use async_channel::{Receiver, SendError}; use tempfile::tempdir_in; @@ -20,7 +20,7 @@ use uv_warnings::warn_user; const COMPILEALL_SCRIPT: &str = include_str!("pip_compileall.py"); /// This is longer than any compilation should ever take. 
-const COMPILE_TIMEOUT: Duration = Duration::from_secs(60); +const DEFAULT_COMPILE_TIMEOUT: Duration = Duration::from_secs(60); #[derive(Debug, Error)] pub enum CompileError { @@ -55,6 +55,8 @@ pub enum CompileError { }, #[error("Python startup timed out ({}s)", _0.as_secs_f32())] StartupTimeout(Duration), + #[error("Got invalid value from environment for {var}: {message}.")] + EnvironmentError { var: &'static str, message: String }, } /// Bytecode compile all file in `dir` using a pool of Python interpreters running a Python script @@ -88,6 +90,29 @@ pub async fn compile_tree( let tempdir = tempdir_in(cache).map_err(CompileError::TempFile)?; let pip_compileall_py = tempdir.path().join("pip_compileall.py"); + let timeout: Option = match env::var(EnvVars::UV_COMPILE_BYTECODE_TIMEOUT) { + Ok(value) => { + if value == "0" { + debug!("Disabling bytecode compilation timeout"); + None + } else { + if let Ok(duration) = value.parse::().map(Duration::from_secs) { + debug!( + "Using bytecode compilation timeout of {}s", + duration.as_secs() + ); + Some(duration) + } else { + return Err(CompileError::EnvironmentError { + var: "UV_COMPILE_BYTECODE_TIMEOUT", + message: format!("Expected an integer number of seconds, got \"{value}\""), + }); + } + } + } + Err(_) => Some(DEFAULT_COMPILE_TIMEOUT), + }; + debug!("Starting {} bytecode compilation workers", worker_count); let mut worker_handles = Vec::new(); for _ in 0..worker_count { @@ -98,6 +123,7 @@ pub async fn compile_tree( python_executable.to_path_buf(), pip_compileall_py.clone(), receiver.clone(), + timeout, ); // Spawn each worker on a dedicated thread. 
@@ -189,6 +215,7 @@ async fn worker( interpreter: PathBuf, pip_compileall_py: PathBuf, receiver: Receiver, + timeout: Option, ) -> Result<(), CompileError> { fs_err::tokio::write(&pip_compileall_py, COMPILEALL_SCRIPT) .await @@ -208,12 +235,17 @@ async fn worker( } } }; + // Handle a broken `python` by using a timeout, one that's higher than any compilation // should ever take. let (mut bytecode_compiler, child_stdin, mut child_stdout, mut child_stderr) = - tokio::time::timeout(COMPILE_TIMEOUT, wait_until_ready) - .await - .map_err(|_| CompileError::StartupTimeout(COMPILE_TIMEOUT))??; + if let Some(duration) = timeout { + tokio::time::timeout(duration, wait_until_ready) + .await + .map_err(|_| CompileError::StartupTimeout(timeout.unwrap()))?? + } else { + wait_until_ready.await? + }; let stderr_reader = tokio::task::spawn(async move { let mut child_stderr_collected: Vec = Vec::new(); @@ -223,7 +255,7 @@ async fn worker( Ok(child_stderr_collected) }); - let result = worker_main_loop(receiver, child_stdin, &mut child_stdout).await; + let result = worker_main_loop(receiver, child_stdin, &mut child_stdout, timeout).await; // Reap the process to avoid zombies. let _ = bytecode_compiler.kill().await; @@ -340,6 +372,7 @@ async fn worker_main_loop( receiver: Receiver, mut child_stdin: ChildStdin, child_stdout: &mut BufReader, + timeout: Option, ) -> Result<(), CompileError> { let mut out_line = String::new(); while let Ok(source_file) = receiver.recv().await { @@ -372,12 +405,16 @@ async fn worker_main_loop( // Handle a broken `python` by using a timeout, one that's higher than any compilation // should ever take. 
- tokio::time::timeout(COMPILE_TIMEOUT, python_handle) - .await - .map_err(|_| CompileError::CompileTimeout { - elapsed: COMPILE_TIMEOUT, - source_file: source_file.clone(), - })??; + if let Some(duration) = timeout { + tokio::time::timeout(duration, python_handle) + .await + .map_err(|_| CompileError::CompileTimeout { + elapsed: duration, + source_file: source_file.clone(), + })??; + } else { + python_handle.await?; + } // This is a sanity check, if we don't get the path back something has gone wrong, e.g. // we're not actually running a python interpreter. diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index ae981cac3..216228ff2 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -162,6 +162,9 @@ impl EnvVars { /// will compile Python source files to bytecode after installation. pub const UV_COMPILE_BYTECODE: &'static str = "UV_COMPILE_BYTECODE"; + /// Timeout (in seconds) for bytecode compilation. + pub const UV_COMPILE_BYTECODE_TIMEOUT: &'static str = "UV_COMPILE_BYTECODE_TIMEOUT"; + /// Equivalent to the `--no-editable` command-line argument. If set, uv /// installs any editable dependencies, including the project and any workspace members, as /// non-editable diff --git a/docs/reference/environment.md b/docs/reference/environment.md index 47e4d8db9..5f06cfd3f 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -26,6 +26,10 @@ directory for caching instead of the default cache directory. Equivalent to the `--compile-bytecode` command-line argument. If set, uv will compile Python source files to bytecode after installation. +### `UV_COMPILE_BYTECODE_TIMEOUT` + +Timeout (in seconds) for bytecode compilation. 
+ ### `UV_CONCURRENT_BUILDS` Sets the maximum number of source distributions that uv will build From 3884ab5715937fdf01dbc1c6bfb91cacf00e20ce Mon Sep 17 00:00:00 2001 From: adisbladis Date: Fri, 18 Jul 2025 01:35:25 +1200 Subject: [PATCH 056/130] Fix bytecode compilation debug message introduced by #14369 (#14682) ## Summary When refactoring the addition PR I accidentally introduced a bug where the debug message would not be output if the default value is used. cc @zanieb --- crates/uv-installer/src/compile.rs | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/crates/uv-installer/src/compile.rs b/crates/uv-installer/src/compile.rs index 4ee74f40d..8704d9542 100644 --- a/crates/uv-installer/src/compile.rs +++ b/crates/uv-installer/src/compile.rs @@ -91,27 +91,28 @@ pub async fn compile_tree( let pip_compileall_py = tempdir.path().join("pip_compileall.py"); let timeout: Option = match env::var(EnvVars::UV_COMPILE_BYTECODE_TIMEOUT) { - Ok(value) => { - if value == "0" { - debug!("Disabling bytecode compilation timeout"); - None - } else { - if let Ok(duration) = value.parse::().map(Duration::from_secs) { - debug!( - "Using bytecode compilation timeout of {}s", - duration.as_secs() - ); - Some(duration) - } else { + Ok(value) => match value.as_str() { + "0" => None, + _ => match value.parse::().map(Duration::from_secs) { + Ok(duration) => Some(duration), + Err(_) => { return Err(CompileError::EnvironmentError { var: "UV_COMPILE_BYTECODE_TIMEOUT", message: format!("Expected an integer number of seconds, got \"{value}\""), }); } - } - } + }, + }, Err(_) => Some(DEFAULT_COMPILE_TIMEOUT), }; + if let Some(duration) = timeout { + debug!( + "Using bytecode compilation timeout of {}s", + duration.as_secs() + ); + } else { + debug!("Disabling bytecode compilation timeout"); + } debug!("Starting {} bytecode compilation workers", worker_count); let mut worker_handles = Vec::new(); From 78d6d1134a50705d336ba209055eb0076feb017d Mon 
Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 11:27:15 -0500 Subject: [PATCH 057/130] Bump version to 0.7.22 (#14685) --- CHANGELOG.md | 34 +++++++++++++++++++++++++++ Cargo.lock | 6 ++--- crates/uv-build/Cargo.toml | 2 +- crates/uv-build/pyproject.toml | 2 +- crates/uv-version/Cargo.toml | 2 +- crates/uv/Cargo.toml | 2 +- docs/concepts/build-backend.md | 2 +- docs/getting-started/installation.md | 4 ++-- docs/guides/integration/aws-lambda.md | 4 ++-- docs/guides/integration/docker.md | 10 ++++---- docs/guides/integration/github.md | 2 +- docs/guides/integration/pre-commit.md | 10 ++++---- pyproject.toml | 2 +- 13 files changed, 58 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38be00d2d..87cf0c9e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,40 @@ +## 0.7.22 + +### Python + +- Upgrade GraalPy to 24.2.2 + +See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for more details. + +### Configuration + +- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable ([#14369](https://github.com/astral-sh/uv/pull/14369)) +- Allow users to override index `cache-control` headers ([#14620](https://github.com/astral-sh/uv/pull/14620)) +- Add `UV_LIBC` to override libc selection in multi-libc environment ([#14646](https://github.com/astral-sh/uv/pull/14646)) + +### Bug fixes + +- Fix `--all-arches` when paired with `--only-downloads` ([#14629](https://github.com/astral-sh/uv/pull/14629)) +- Skip Windows Python interpreters that return a broken MSIX package code ([#14636](https://github.com/astral-sh/uv/pull/14636)) +- Warn on invalid `uv.toml` when provided via direct path ([#14653](https://github.com/astral-sh/uv/pull/14653)) +- Improve async signal safety in Windows exception handler ([#14619](https://github.com/astral-sh/uv/pull/14619)) + +### Documentation + +- Mention the `revision` in the lockfile versioning doc ([#14634](https://github.com/astral-sh/uv/pull/14634)) +- Move 
"Conflicting dependencies" to the "Resolution" page ([#14633](https://github.com/astral-sh/uv/pull/14633)) +- Rename "Dependency specifiers" section to exclude PEP 508 reference ([#14631](https://github.com/astral-sh/uv/pull/14631)) +- Suggest `uv cache clean` prior to `--reinstall` ([#14659](https://github.com/astral-sh/uv/pull/14659)) + +### Preview features + +- Make preview Python registration on Windows non-fatal ([#14614](https://github.com/astral-sh/uv/pull/14614)) +- Update preview installation of Python executables to be non-fatal ([#14612](https://github.com/astral-sh/uv/pull/14612)) +- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627)) + ## 0.7.21 ### Python diff --git a/Cargo.lock b/Cargo.lock index 3ff7ad6d0..8a95f655d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4633,7 +4633,7 @@ dependencies = [ [[package]] name = "uv" -version = "0.7.21" +version = "0.7.22" dependencies = [ "anstream", "anyhow", @@ -4800,7 +4800,7 @@ dependencies = [ [[package]] name = "uv-build" -version = "0.7.21" +version = "0.7.22" dependencies = [ "anyhow", "uv-build-backend", @@ -5993,7 +5993,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.7.21" +version = "0.7.22" [[package]] name = "uv-virtualenv" diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml index f943010ae..8014fa445 100644 --- a/crates/uv-build/Cargo.toml +++ b/crates/uv-build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-build" -version = "0.7.21" +version = "0.7.22" edition.workspace = true rust-version.workspace = true homepage.workspace = true diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml index 5a2209155..1a78d34dc 100644 --- a/crates/uv-build/pyproject.toml +++ b/crates/uv-build/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uv-build" -version = "0.7.21" +version = "0.7.22" description = "The uv build backend" authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" 
diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index a9fe788a5..e1a424af8 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.7.21" +version = "0.7.22" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index d72035467..975495904 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.7.21" +version = "0.7.22" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md index 69694f317..5f52463bf 100644 --- a/docs/concepts/build-backend.md +++ b/docs/concepts/build-backend.md @@ -36,7 +36,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the ```toml title="pyproject.toml" [build-system] -requires = ["uv_build>=0.7.21,<0.8.0"] +requires = ["uv_build>=0.7.22,<0.8.0"] build-backend = "uv_build" ``` diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index fa68d210a..3e31a5003 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```console - $ curl -LsSf https://astral.sh/uv/0.7.21/install.sh | sh + $ curl -LsSf https://astral.sh/uv/0.7.22/install.sh | sh ``` === "Windows" @@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```pwsh-session - PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.21/install.ps1 | iex" + PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.22/install.ps1 | iex" ``` !!! 
tip diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md index 4cdb75b7a..14224b3fe 100644 --- a/docs/guides/integration/aws-lambda.md +++ b/docs/guides/integration/aws-lambda.md @@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th other unnecessary files. ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.21 AS uv +FROM ghcr.io/astral-sh/uv:0.7.22 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder @@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell Finally, we'll update the Dockerfile to include the local library in the deployment package: ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.21 AS uv +FROM ghcr.io/astral-sh/uv:0.7.22 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index bbea9b264..2ea14c9b0 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help The following distroless images are available: - `ghcr.io/astral-sh/uv:latest` -- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.21` +- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.22` - `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch version) @@ -75,7 +75,7 @@ And the following derived images are available: As with the distroless image, each derived image is published with uv version tags as `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and -`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.21-alpine`. +`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`. 
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv) page. @@ -113,7 +113,7 @@ Note this requires `curl` to be available. In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/ ``` !!! tip @@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.21 /uv /uvx /bin/ Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.7.21/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.7.22/install.sh /uv-installer.sh ``` ### Installing a project @@ -557,5 +557,5 @@ Verified OK !!! tip These examples use `latest`, but best practice is to verify the attestation for a specific - version tag, e.g., `ghcr.io/astral-sh/uv:0.7.21`, or (even better) the specific image digest, + version tag, e.g., `ghcr.io/astral-sh/uv:0.7.22`, or (even better) the specific image digest, such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`. diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index d206febd1..956b47660 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -47,7 +47,7 @@ jobs: uses: astral-sh/setup-uv@v6 with: # Install a specific version of uv. - version: "0.7.21" + version: "0.7.22" ``` ## Setting up Python diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index 912ff0213..d2598fed8 100644 --- a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. 
- rev: 0.7.21 + rev: 0.7.22 hooks: - id: uv-lock ``` @@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.21 + rev: 0.7.22 hooks: - id: uv-export ``` @@ -41,7 +41,7 @@ To compile requirements files: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.21 + rev: 0.7.22 hooks: # Compile requirements - id: pip-compile @@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.21 + rev: 0.7.22 hooks: # Compile requirements - id: pip-compile @@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.21 + rev: 0.7.22 hooks: # Compile requirements - id: pip-compile diff --git a/pyproject.toml b/pyproject.toml index f3c9c4f64..a079d53b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "uv" -version = "0.7.21" +version = "0.7.22" description = "An extremely fast Python package and project manager, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" From 868ecd7b3a4855b3b84ec121a826fb218b843084 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 12:33:43 -0500 Subject: [PATCH 058/130] Add support for toggling Python bin and registry install options via env vars (#14662) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds environment variables for https://github.com/astral-sh/uv/pull/14612 and https://github.com/astral-sh/uv/pull/14614 We can't use the Clap `BoolishValueParser` here, and the reasoning is a little hard to explain. 
If we used `UV_PYTHON_INSTALL_NO_BIN`, as is our typical pattern, it'd work, but here we allow opt-in to hard errors with `UV_PYTHON_INSTALL_BIN=1` and I don't think we should have both `UV_PYTHON_INSTALL_BIN` and `UV_PYTHON_INSTALL_NO_BIN`. Consequently, this pull request introduces a new `EnvironmentOptions` abstraction which allows us to express semantics that Clap cannot — which we probably want anyway because we have an increasing number of environment variables we're parsing downstream, e.g., #14544 and #14369. --- crates/uv-cli/src/lib.rs | 8 +++ crates/uv-settings/src/lib.rs | 82 ++++++++++++++++++++++++++++ crates/uv-static/src/env_vars.rs | 6 ++ crates/uv/src/lib.rs | 7 ++- crates/uv/src/settings.rs | 15 +++-- crates/uv/tests/it/help.rs | 8 ++- crates/uv/tests/it/python_install.rs | 20 ++++++- docs/reference/cli.md | 6 +- docs/reference/environment.md | 8 +++ 9 files changed, 147 insertions(+), 13 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index a846aec59..94b79558d 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -4959,11 +4959,15 @@ pub struct PythonInstallArgs { /// This is the default behavior. If this flag is provided explicitly, uv will error if the /// executable cannot be installed. /// + /// This can also be set with `UV_PYTHON_INSTALL_BIN=1`. + /// /// See `UV_PYTHON_BIN_DIR` to customize the target directory. #[arg(long, overrides_with("no_bin"), hide = true)] pub bin: bool, /// Do not install a Python executable into the `bin` directory. + /// + /// This can also be set with `UV_PYTHON_INSTALL_BIN=0`. #[arg(long, overrides_with("bin"), conflicts_with("default"))] pub no_bin: bool, @@ -4971,10 +4975,14 @@ pub struct PythonInstallArgs { /// /// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the /// registry entry cannot be created. + /// + /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=1`. 
#[arg(long, overrides_with("no_registry"), hide = true)] pub registry: bool, /// Do not register the Python installation in the Windows registry. + /// + /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`. #[arg(long, overrides_with("registry"))] pub no_registry: bool, diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs index d676cc060..cad600cfc 100644 --- a/crates/uv-settings/src/lib.rs +++ b/crates/uv-settings/src/lib.rs @@ -3,6 +3,7 @@ use std::path::{Path, PathBuf}; use uv_dirs::{system_config_file, user_config_dir}; use uv_fs::Simplified; +use uv_static::EnvVars; use uv_warnings::warn_user; pub use crate::combine::*; @@ -246,4 +247,85 @@ pub enum Error { #[error("Failed to parse: `{}`. The `{}` field is not allowed in a `uv.toml` file. `{}` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead.", _0.user_display(), _1, _1)] PyprojectOnlyField(PathBuf, &'static str), + + #[error("Failed to parse environment variable `{name}` with invalid value `{value}`: {err}")] + InvalidEnvironmentVariable { + name: String, + value: String, + err: String, + }, +} + +/// Options loaded from environment variables. +/// +/// This is currently a subset of all respected environment variables, most are parsed via Clap at +/// the CLI level, however there are limited semantics in that context. +#[derive(Debug, Clone)] +pub struct EnvironmentOptions { + pub python_install_bin: Option, + pub python_install_registry: Option, +} + +impl EnvironmentOptions { + /// Create a new [`EnvironmentOptions`] from environment variables. + pub fn new() -> Result { + Ok(Self { + python_install_bin: parse_boolish_environment_variable(EnvVars::UV_PYTHON_INSTALL_BIN)?, + python_install_registry: parse_boolish_environment_variable( + EnvVars::UV_PYTHON_INSTALL_REGISTRY, + )?, + }) + } +} + +/// Parse a boolean environment variable. 
+/// +/// Adapted from Clap's `BoolishValueParser` which is dual licensed under the MIT and Apache-2.0. +fn parse_boolish_environment_variable(name: &'static str) -> Result, Error> { + // See `clap_builder/src/util/str_to_bool.rs` + // We want to match Clap's accepted values + + // True values are `y`, `yes`, `t`, `true`, `on`, and `1`. + const TRUE_LITERALS: [&str; 6] = ["y", "yes", "t", "true", "on", "1"]; + + // False values are `n`, `no`, `f`, `false`, `off`, and `0`. + const FALSE_LITERALS: [&str; 6] = ["n", "no", "f", "false", "off", "0"]; + + // Converts a string literal representation of truth to true or false. + // + // `false` values are `n`, `no`, `f`, `false`, `off`, and `0` (case insensitive). + // + // Any other value will be considered as `true`. + fn str_to_bool(val: impl AsRef) -> Option { + let pat: &str = &val.as_ref().to_lowercase(); + if TRUE_LITERALS.contains(&pat) { + Some(true) + } else if FALSE_LITERALS.contains(&pat) { + Some(false) + } else { + None + } + } + + let Some(value) = std::env::var_os(name) else { + return Ok(None); + }; + + let Some(value) = value.to_str() else { + return Err(Error::InvalidEnvironmentVariable { + name: name.to_string(), + value: value.to_string_lossy().to_string(), + err: "expected a valid UTF-8 string".to_string(), + }); + }; + + let Some(value) = str_to_bool(value) else { + return Err(Error::InvalidEnvironmentVariable { + name: name.to_string(), + value: value.to_string(), + err: "expected a boolish value".to_string(), + }); + }; + + Ok(Some(value)) } diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index 216228ff2..58458e8ca 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -269,6 +269,12 @@ impl EnvVars { /// Specifies the directory for storing managed Python installations. pub const UV_PYTHON_INSTALL_DIR: &'static str = "UV_PYTHON_INSTALL_DIR"; + /// Whether to install the Python executable into the `UV_PYTHON_BIN_DIR` directory. 
+ pub const UV_PYTHON_INSTALL_BIN: &'static str = "UV_PYTHON_INSTALL_BIN"; + + /// Whether to install the Python executable into the Windows registry. + pub const UV_PYTHON_INSTALL_REGISTRY: &'static str = "UV_PYTHON_INSTALL_REGISTRY"; + /// Managed Python installations information is hardcoded in the `uv` binary. /// /// This variable can be set to a URL pointing to JSON to use as a list for Python installations. diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 384f48ac4..995738638 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -39,7 +39,7 @@ use uv_python::PythonRequest; use uv_requirements::RequirementsSource; use uv_requirements_txt::RequirementsTxtRequirement; use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script}; -use uv_settings::{Combine, FilesystemOptions, Options}; +use uv_settings::{Combine, EnvironmentOptions, FilesystemOptions, Options}; use uv_static::EnvVars; use uv_warnings::{warn_user, warn_user_once}; use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache}; @@ -304,6 +304,9 @@ async fn run(mut cli: Cli) -> Result { .map(FilesystemOptions::from) .combine(filesystem); + // Load environment variables not handled by Clap + let environment = EnvironmentOptions::new()?; + // Resolve the global settings. let globals = GlobalSettings::resolve(&cli.top_level.global_args, filesystem.as_ref()); @@ -1391,7 +1394,7 @@ async fn run(mut cli: Cli) -> Result { command: PythonCommand::Install(args), }) => { // Resolve the settings from the command-line arguments and workspace configuration. - let args = settings::PythonInstallSettings::resolve(args, filesystem); + let args = settings::PythonInstallSettings::resolve(args, filesystem, environment); show_settings!(args); // TODO(john): If we later want to support `--upgrade`, we need to replace this. 
let upgrade = false; diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index b221f0f5d..b246f228f 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -38,8 +38,8 @@ use uv_resolver::{ AnnotationStyle, DependencyMode, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode, }; use uv_settings::{ - Combine, FilesystemOptions, Options, PipOptions, PublishOptions, PythonInstallMirrors, - ResolverInstallerOptions, ResolverOptions, + Combine, EnvironmentOptions, FilesystemOptions, Options, PipOptions, PublishOptions, + PythonInstallMirrors, ResolverInstallerOptions, ResolverOptions, }; use uv_static::EnvVars; use uv_torch::TorchMode; @@ -944,7 +944,11 @@ pub(crate) struct PythonInstallSettings { impl PythonInstallSettings { /// Resolve the [`PythonInstallSettings`] from the CLI and filesystem configuration. #[allow(clippy::needless_pass_by_value)] - pub(crate) fn resolve(args: PythonInstallArgs, filesystem: Option) -> Self { + pub(crate) fn resolve( + args: PythonInstallArgs, + filesystem: Option, + environment: EnvironmentOptions, + ) -> Self { let options = filesystem.map(FilesystemOptions::into_options); let (python_mirror, pypy_mirror, python_downloads_json_url) = match options { Some(options) => ( @@ -979,8 +983,9 @@ impl PythonInstallSettings { targets, reinstall, force, - bin: flag(bin, no_bin, "bin"), - registry: flag(registry, no_registry, "registry"), + bin: flag(bin, no_bin, "bin").or(environment.python_install_bin), + registry: flag(registry, no_registry, "registry") + .or(environment.python_install_registry), python_install_mirror: python_mirror, pypy_install_mirror: pypy_mirror, python_downloads_json_url, diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index 39de4c6f9..d9353f7c3 100644 --- a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -506,10 +506,14 @@ fn help_subsubcommand() { [env: UV_PYTHON_INSTALL_DIR=] --no-bin - Do not install a Python executable into the `bin` 
directory + Do not install a Python executable into the `bin` directory. + + This can also be set with `UV_PYTHON_INSTALL_BIN=0`. --no-registry - Do not register the Python installation in the Windows registry + Do not register the Python installation in the Windows registry. + + This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`. --mirror Set the URL to use as the source for downloading Python installations. diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index 62b3254b8..50b0b3cf5 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -445,6 +445,15 @@ fn python_install_preview() { exit_code: 1 ----- stdout ----- + ----- stderr ----- + error: Failed to install executable for cpython-3.13.5-[PLATFORM] + Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it + "); + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13").env(EnvVars::UV_PYTHON_INSTALL_BIN, "1"), @r" + success: false + exit_code: 1 + ----- stdout ----- + ----- stderr ----- error: Failed to install executable for cpython-3.13.5-[PLATFORM] Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it @@ -456,6 +465,13 @@ fn python_install_preview() { exit_code: 0 ----- stdout ----- + ----- stderr ----- + "); + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13").env(EnvVars::UV_PYTHON_INSTALL_BIN, "0"), @r" + success: true + exit_code: 0 + ----- stdout ----- + ----- stderr ----- "); @@ -643,7 +659,7 @@ fn python_install_preview_upgrade() { .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX)); // Install 3.12.5 - uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.5"), @r###" + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.5"), @r" success: true exit_code: 0 ----- 
stdout ----- @@ -651,7 +667,7 @@ fn python_install_preview_upgrade() { ----- stderr ----- Installed Python 3.12.5 in [TIME] + cpython-3.12.5-[PLATFORM] (python3.12) - "###); + "); // Installing with a patch version should cause the link to be to the patch installation. if cfg!(unix) { diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 66c46ae0c..5aea00f32 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -2796,7 +2796,8 @@ uv python install [OPTIONS] [TARGETS]...

    May also be set with the UV_PYTHON_INSTALL_MIRROR environment variable.

    --native-tls

    Whether to load TLS certificates from the platform's native certificate store.

    By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).

    However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.

    -

    May also be set with the UV_NATIVE_TLS environment variable.

    --no-bin

    Do not install a Python executable into the bin directory

    +

    May also be set with the UV_NATIVE_TLS environment variable.

    --no-bin

    Do not install a Python executable into the bin directory.

    +

    This can also be set with UV_PYTHON_INSTALL_BIN=0.

    --no-cache, --no-cache-dir, -n

    Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation

    May also be set with the UV_NO_CACHE environment variable.

    --no-config

    Avoid discovering configuration files (pyproject.toml, uv.toml).

    Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.

    @@ -2805,7 +2806,8 @@ uv python install [OPTIONS] [TARGETS]...

    May also be set with the UV_NO_MANAGED_PYTHON environment variable.

    --no-progress

    Hide all progress outputs.

    For example, spinners or progress bars.

    May also be set with the UV_NO_PROGRESS environment variable.

    --no-python-downloads

    Disable automatic downloads of Python.

    -
    --no-registry

    Do not register the Python installation in the Windows registry

    +
    --no-registry

    Do not register the Python installation in the Windows registry.

    +

    This can also be set with UV_PYTHON_INSTALL_REGISTRY=0.

    --offline

    Disable network access.

    When disabled, uv will only use locally cached data and locally available files.

    May also be set with the UV_OFFLINE environment variable.

    --project project

    Run the command within the given project directory.

    diff --git a/docs/reference/environment.md b/docs/reference/environment.md index 5f06cfd3f..a64869edb 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -376,6 +376,10 @@ This will allow for setting each property of the Python installation, mostly the Note that currently, only local paths are supported. +### `UV_PYTHON_INSTALL_BIN` + +Whether to install the Python executable into the `UV_PYTHON_BIN_DIR` directory. + ### `UV_PYTHON_INSTALL_DIR` Specifies the directory for storing managed Python installations. @@ -390,6 +394,10 @@ The provided URL will replace `https://github.com/astral-sh/python-build-standal `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`. Distributions can be read from a local directory by using the `file://` URL scheme. +### `UV_PYTHON_INSTALL_REGISTRY` + +Whether to install the Python executable into the Windows registry. + ### `UV_PYTHON_PREFERENCE` Whether uv should prefer system or managed Python versions. From 35e2f67b5e4529050e615f6e812d0d727afc20d3 Mon Sep 17 00:00:00 2001 From: samypr100 <3933065+samypr100@users.noreply.github.com> Date: Sat, 21 Jun 2025 15:42:28 -0400 Subject: [PATCH 059/130] feat(docker): set default `UV_TOOL_BIN_DIR` on docker images (#13391) Closes #13057 Sets `UV_TOOL_BIN_DIR` to `/usr/local/bin` for all derived images to allow `uv tool install` to work out of the box. Note, when the default image user is overwritten (e.g. `USER `) by a less privileged one, an alternative writable location would now need to be set by downstream consumers to prevent issues, hence I'm labeling this as a breaking change for 0.8.x release. Relates to https://github.com/astral-sh/uv-docker-example/pull/55 Each image was tested to work with uv tool with `UV_TOOL_BIN_DIR` set to `/usr/local/bin` with the default root user and alternative non-root users to confirm breaking nature of the change. 
--- .github/workflows/build-docker.yml | 1 + docs/guides/integration/docker.md | 3 +++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 843ee8dfb..3c080b63f 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -225,6 +225,7 @@ jobs: cat < Dockerfile FROM ${BASE_IMAGE} COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/ + ENV UV_TOOL_BIN_DIR="/usr/local/bin" ENTRYPOINT [] CMD ["/usr/local/bin/uv"] EOF diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index 2ea14c9b0..a75228723 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -77,6 +77,9 @@ As with the distroless image, each derived image is published with uv version ta `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and `ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`. +In addition, starting with `0.8` each derived image also sets `UV_TOOL_BIN_DIR` to `/usr/local/bin` +to allow `uv tool install` to work as expected with the default user. + For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv) page. From c8925e2541ae451148cdd2d4b12fa7904004dceb Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 26 Jun 2025 12:22:38 -0500 Subject: [PATCH 060/130] Require `--global` for removal of the global Python pin (#14169) While reviewing https://github.com/astral-sh/uv/pull/14107, @oconnor663 pointed out a bug where we allow `uv python pin --rm` to delete the global pin without the `--global` flag. I think that shouldn't be allowed? I'm not 100% certain though. 
--- Cargo.lock | 1 - crates/uv-python/src/version_files.rs | 16 +++++++++++++ crates/uv/Cargo.toml | 1 - crates/uv/src/commands/python/pin.rs | 22 ++++++++++++------ crates/uv/tests/it/python_pin.rs | 33 +++++++++++++++++++++++++-- 5 files changed, 62 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8a95f655d..2963b6374 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4696,7 +4696,6 @@ dependencies = [ "uv-client", "uv-configuration", "uv-console", - "uv-dirs", "uv-dispatch", "uv-distribution", "uv-distribution-filename", diff --git a/crates/uv-python/src/version_files.rs b/crates/uv-python/src/version_files.rs index a9cd05b7e..595a18f0f 100644 --- a/crates/uv-python/src/version_files.rs +++ b/crates/uv-python/src/version_files.rs @@ -217,6 +217,19 @@ impl PythonVersionFile { } } + /// Create a new representation of a global Python version file. + /// + /// Returns [`None`] if the user configuration directory cannot be determined. + pub fn global() -> Option { + let path = user_uv_config_dir()?.join(PYTHON_VERSION_FILENAME); + Some(Self::new(path)) + } + + /// Returns `true` if the version file is a global version file. + pub fn is_global(&self) -> bool { + PythonVersionFile::global().is_some_and(|global| self.path() == global.path()) + } + /// Return the first request declared in the file, if any. pub fn version(&self) -> Option<&PythonRequest> { self.versions.first() @@ -260,6 +273,9 @@ impl PythonVersionFile { /// Update the version file on the file system. 
pub async fn write(&self) -> Result<(), std::io::Error> { debug!("Writing Python versions to `{}`", self.path.display()); + if let Some(parent) = self.path.parent() { + fs_err::tokio::create_dir_all(parent).await?; + } fs::tokio::write( &self.path, self.versions diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index 975495904..ff389f033 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -24,7 +24,6 @@ uv-cli = { workspace = true } uv-client = { workspace = true } uv-configuration = { workspace = true } uv-console = { workspace = true } -uv-dirs = { workspace = true } uv-dispatch = { workspace = true } uv-distribution = { workspace = true } uv-distribution-filename = { workspace = true } diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs index f4d10cdfa..0e78e6b5c 100644 --- a/crates/uv/src/commands/python/pin.rs +++ b/crates/uv/src/commands/python/pin.rs @@ -9,7 +9,6 @@ use tracing::debug; use uv_cache::Cache; use uv_client::BaseClientBuilder; use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode}; -use uv_dirs::user_uv_config_dir; use uv_fs::Simplified; use uv_python::{ EnvironmentPreference, PYTHON_VERSION_FILENAME, PythonDownloads, PythonInstallation, @@ -72,10 +71,20 @@ pub(crate) async fn pin( } bail!("No Python version file found"); }; + + if !global && file.is_global() { + bail!("No Python version file found; use `--rm --global` to remove the global pin"); + } + fs_err::tokio::remove_file(file.path()).await?; writeln!( printer.stdout(), - "Removed Python version file at `{}`", + "Removed {} at `{}`", + if global { + "global Python pin" + } else { + "Python version file" + }, file.path().user_display() )?; return Ok(ExitStatus::Success); @@ -194,12 +203,11 @@ pub(crate) async fn pin( let existing = version_file.ok().flatten(); // TODO(zanieb): Allow updating the discovered version file with an `--update` flag. 
let new = if global { - let Some(config_dir) = user_uv_config_dir() else { - return Err(anyhow::anyhow!("No user-level config directory found.")); + let Some(new) = PythonVersionFile::global() else { + // TODO(zanieb): We should find a nice way to surface that as an error + bail!("Failed to determine directory for global Python pin"); }; - fs_err::tokio::create_dir_all(&config_dir).await?; - PythonVersionFile::new(config_dir.join(PYTHON_VERSION_FILENAME)) - .with_versions(vec![request]) + new.with_versions(vec![request]) } else { PythonVersionFile::new(project_dir.join(PYTHON_VERSION_FILENAME)) .with_versions(vec![request]) diff --git a/crates/uv/tests/it/python_pin.rs b/crates/uv/tests/it/python_pin.rs index cf8849f42..97093831c 100644 --- a/crates/uv/tests/it/python_pin.rs +++ b/crates/uv/tests/it/python_pin.rs @@ -847,7 +847,7 @@ fn python_pin_rm() { error: No Python version file found "); - // Remove the local pin + // Create and remove a local pin context.python_pin().arg("3.12").assert().success(); uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r" success: true @@ -884,12 +884,41 @@ fn python_pin_rm() { .arg("--global") .assert() .success(); + uv_snapshot!(context.filters(), context.python_pin().arg("--rm").arg("--global"), @r" success: true exit_code: 0 ----- stdout ----- - Removed Python version file at `[UV_USER_CONFIG_DIR]/.python-version` + Removed global Python pin at `[UV_USER_CONFIG_DIR]/.python-version` ----- stderr ----- "); + + // Add the global pin again + context + .python_pin() + .arg("3.12") + .arg("--global") + .assert() + .success(); + + // Remove the local pin + uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Removed Python version file at `.python-version` + + ----- stderr ----- + "); + + // The global pin should not be removed without `--global` + uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r" + success: false + exit_code: 
2 + ----- stdout ----- + + ----- stderr ----- + error: No Python version file found; use `--rm --global` to remove the global pin + "); } From e4c04af32d297693801582999178775b7627f9d8 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 26 Jun 2025 22:45:45 -0400 Subject: [PATCH 061/130] Bump `--python-platform linux` to `manylinux_2_28` (#14300) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Right now, `--python-platform linux` to defaults to `manylinux_2_17`. Defaulting to `manylinux_2_17` causes some problems for users, since it means we can't use (e.g.) `manylinux_2_28` wheels, and end up having to build from source. cibuildwheel made `manylinux_2_28` their default in https://github.com/pypa/cibuildwheel/pull/1988, and there's a lot of discussion in https://github.com/pypa/cibuildwheel/issues/1772 and https://github.com/pypa/cibuildwheel/issues/2047. In short, the `manylinux_2014` image is EOL, and the vast majority of consumers now run at least glibc 2.28 (https://mayeut.github.io/manylinux-timeline/): ![Screenshot 2025-06-26 at 7 47 23 PM](https://github.com/user-attachments/assets/2672d91b-f9eb-4442-b680-7e4cd7cade91) Note that this only changes the _default_. Users can still compile against `manylinux_2_17` by specifying it. --- crates/uv-configuration/src/target_triple.rs | 8 ++++---- crates/uv/tests/it/pip_compile.rs | 2 +- docs/reference/cli.md | 16 ++++++++-------- uv.schema.json | 4 ++-- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/crates/uv-configuration/src/target_triple.rs b/crates/uv-configuration/src/target_triple.rs index 81499deff..842fb39a7 100644 --- a/crates/uv-configuration/src/target_triple.rs +++ b/crates/uv-configuration/src/target_triple.rs @@ -33,7 +33,7 @@ pub enum TargetTriple { #[serde(rename = "i686-pc-windows-msvc")] I686PcWindowsMsvc, - /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`. + /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`. 
#[cfg_attr(feature = "clap", value(name = "x86_64-unknown-linux-gnu"))] #[serde(rename = "x86_64-unknown-linux-gnu")] #[serde(alias = "x8664-unknown-linux-gnu")] @@ -56,7 +56,7 @@ pub enum TargetTriple { #[serde(alias = "x8664-apple-darwin")] X8664AppleDarwin, - /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`. + /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`. #[cfg_attr(feature = "clap", value(name = "aarch64-unknown-linux-gnu"))] #[serde(rename = "aarch64-unknown-linux-gnu")] Aarch64UnknownLinuxGnu, @@ -240,7 +240,7 @@ impl TargetTriple { Self::Linux | Self::X8664UnknownLinuxGnu => Platform::new( Os::Manylinux { major: 2, - minor: 17, + minor: 28, }, Arch::X86_64, ), @@ -262,7 +262,7 @@ impl TargetTriple { Self::Aarch64UnknownLinuxGnu => Platform::new( Os::Manylinux { major: 2, - minor: 17, + minor: 28, }, Arch::Aarch64, ), diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index b99be1296..f04c16b86 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -14728,7 +14728,7 @@ fn invalid_platform() -> Result<()> { uv_snapshot!(context .pip_compile() .arg("--python-platform") - .arg("linux") + .arg("x86_64-manylinux_2_17") .arg("requirements.in"), @r" success: false exit_code: 1 diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 5aea00f32..aa6213eff 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -1736,10 +1736,10 @@ interpreter. Use --universal to display the tree for all platforms,
  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • -
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • x86_64-apple-darwin: An x86 macOS target
  • -
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • @@ -3490,10 +3490,10 @@ by --python-version.

  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • -
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • x86_64-apple-darwin: An x86 macOS target
  • -
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • @@ -3747,10 +3747,10 @@ be used with caution, as it can modify the system Python installation.

  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • -
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • x86_64-apple-darwin: An x86 macOS target
  • -
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • @@ -4029,10 +4029,10 @@ should be used with caution, as it can modify the system Python installation.

  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • -
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • x86_64-apple-darwin: An x86 macOS target
  • -
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • diff --git a/uv.schema.json b/uv.schema.json index e418f37f0..ba89f65f4 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -2114,7 +2114,7 @@ "const": "i686-pc-windows-msvc" }, { - "description": "An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.", + "description": "An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`.", "type": "string", "const": "x86_64-unknown-linux-gnu" }, @@ -2129,7 +2129,7 @@ "const": "x86_64-apple-darwin" }, { - "description": "An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.", + "description": "An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`.", "type": "string", "const": "aarch64-unknown-linux-gnu" }, From c3d7d3899c435d528d34f242a3750aeed1bb8c50 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 22:05:49 -0400 Subject: [PATCH 062/130] Default to `--workspace` when adding subdirectories (#14529) If `--workspace` is provided, we add all paths as workspace members. If `--no-workspace` is provided, we add all paths as direct path dependencies. If neither is provided, then we add any paths that are under the workspace root as workspace members, and the rest as direct path dependencies. Closes #14524. --- crates/uv-cli/src/lib.rs | 15 +- crates/uv/src/commands/project/add.rs | 80 +++-- crates/uv/src/settings.rs | 5 +- crates/uv/tests/it/edit.rs | 454 +++++++++++++++++++++++++- docs/reference/cli.md | 10 +- 5 files changed, 522 insertions(+), 42 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 94b79558d..4c01fd780 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -3726,10 +3726,19 @@ pub struct AddArgs { /// Add the dependency as a workspace member. /// - /// When used with a path dependency, the package will be added to the workspace's `members` - /// list in the root `pyproject.toml` file. - #[arg(long)] + /// By default, uv will add path dependencies that are within the workspace directory + /// as workspace members. 
When used with a path dependency, the package will be added + /// to the workspace's `members` list in the root `pyproject.toml` file. + #[arg(long, overrides_with = "no_workspace")] pub workspace: bool, + + /// Don't add the dependency as a workspace member. + /// + /// By default, when adding a dependency that's a local path and is within the workspace + /// directory, uv will add it as a workspace member; pass `--no-workspace` to add the package + /// as direct path dependency instead. + #[arg(long, overrides_with = "workspace")] + pub no_workspace: bool, } #[derive(Args)] diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index d65866483..28cc2dcd5 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -83,7 +83,7 @@ pub(crate) async fn add( extras_of_dependency: Vec, package: Option, python: Option, - workspace: bool, + workspace: Option, install_mirrors: PythonInstallMirrors, settings: ResolverInstallerSettings, network_settings: NetworkSettings, @@ -497,16 +497,41 @@ pub(crate) async fn add( // Track modification status, for reverts. let mut modified = false; - // If `--workspace` is provided, add any members to the `workspace` section of the + // Determine whether to use workspace mode. + let use_workspace = match workspace { + Some(workspace) => workspace, + None => { + // Check if we're in a project (not a script), and if any requirements are path + // dependencies within the workspace. + if let AddTarget::Project(ref project, _) = target { + let workspace_root = project.workspace().install_path(); + requirements.iter().any(|req| { + if let RequirementSource::Directory { install_path, .. 
} = &req.source { + let absolute_path = if install_path.is_absolute() { + install_path.to_path_buf() + } else { + project.root().join(install_path) + }; + absolute_path.starts_with(workspace_root) + } else { + false + } + }) + } else { + false + } + } + }; + + // If workspace mode is enabled, add any members to the `workspace` section of the // `pyproject.toml` file. - if workspace { + if use_workspace { let AddTarget::Project(project, python_target) = target else { unreachable!("`--workspace` and `--script` are conflicting options"); }; - let workspace = project.workspace(); let mut toml = PyProjectTomlMut::from_toml( - &workspace.pyproject_toml().raw, + &project.workspace().pyproject_toml().raw, DependencyTarget::PyProjectToml, )?; @@ -519,21 +544,32 @@ pub(crate) async fn add( project.root().join(install_path) }; - // Check if the path is not already included in the workspace. - if !workspace.includes(&absolute_path)? { - let relative_path = absolute_path - .strip_prefix(workspace.install_path()) - .unwrap_or(&absolute_path); - - toml.add_workspace(relative_path)?; - modified |= true; - - writeln!( - printer.stderr(), - "Added `{}` to workspace members", - relative_path.user_display().cyan() - )?; + // Either `--workspace` was provided explicitly, or it was omitted but the path is + // within the workspace root. + let use_workspace = workspace.unwrap_or_else(|| { + absolute_path.starts_with(project.workspace().install_path()) + }); + if !use_workspace { + continue; } + + // If the project is already a member of the workspace, skip it. + if project.workspace().includes(&absolute_path)? 
{ + continue; + } + + let relative_path = absolute_path + .strip_prefix(project.workspace().install_path()) + .unwrap_or(&absolute_path); + + toml.add_workspace(relative_path)?; + modified |= true; + + writeln!( + printer.stderr(), + "Added `{}` to workspace members", + relative_path.user_display().cyan() + )?; } } @@ -542,7 +578,7 @@ pub(crate) async fn add( target = if modified { let workspace_content = toml.to_string(); fs_err::write( - workspace.install_path().join("pyproject.toml"), + project.workspace().install_path().join("pyproject.toml"), &workspace_content, )?; @@ -747,13 +783,13 @@ fn edits( .and_then(|tool| tool.uv.as_ref()) .and_then(|uv| uv.sources.as_ref()) .map(ToolUvSources::inner); - let workspace = project + let is_workspace_member = project .workspace() .packages() .contains_key(&requirement.name); resolve_requirement( requirement, - workspace, + is_workspace_member, editable, index.cloned(), rev.map(ToString::to_string), diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index b246f228f..bf3bca4a4 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -1351,7 +1351,7 @@ pub(crate) struct AddSettings { pub(crate) package: Option, pub(crate) script: Option, pub(crate) python: Option, - pub(crate) workspace: bool, + pub(crate) workspace: Option, pub(crate) install_mirrors: PythonInstallMirrors, pub(crate) refresh: Refresh, pub(crate) indexes: Vec, @@ -1390,6 +1390,7 @@ impl AddSettings { script, python, workspace, + no_workspace, } = args; let dependency_type = if let Some(extra) = optional { @@ -1490,7 +1491,7 @@ impl AddSettings { package, script, python: python.and_then(Maybe::into_option), - workspace, + workspace: flag(workspace, no_workspace, "workspace"), editable: flag(editable, no_editable, "editable"), extras: extra.unwrap_or_default(), refresh: Refresh::from(refresh), diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index ddaed434f..ccc0cabf2 100644 --- a/crates/uv/tests/it/edit.rs 
+++ b/crates/uv/tests/it/edit.rs @@ -2491,9 +2491,9 @@ fn add_workspace_path() -> Result<()> { Ok(()) } -/// Add a path dependency. +/// Add a path dependency, which should be implicitly added to the workspace. #[test] -fn add_path() -> Result<()> { +fn add_path_implicit_workspace() -> Result<()> { let context = TestContext::new("3.12"); let workspace = context.temp_dir.child("workspace"); @@ -2533,6 +2533,7 @@ fn add_path() -> Result<()> { ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv + Added `packages/child` to workspace members Resolved 2 packages in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] @@ -2545,7 +2546,134 @@ fn add_path() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "child", + ] + + [tool.uv.workspace] + members = [ + "packages/child", + ] + + [tool.uv.sources] + child = { workspace = true } + "# + ); + }); + + // `uv add` implies a full lock and sync, including development dependencies. + let lock = fs_err::read_to_string(workspace.join("uv.lock"))?; + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + members = [ + "child", + "parent", + ] + + [[package]] + name = "child" + version = "0.1.0" + source = { editable = "packages/child" } + + [[package]] + name = "parent" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "child" }, + ] + + [package.metadata] + requires-dist = [{ name = "child", editable = "packages/child" }] + "# + ); + }); + + // Install from the lockfile. 
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen").current_dir(workspace.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "); + + Ok(()) +} + +/// Add a path dependency with `--no-workspace`, which should not be added to the workspace. +#[test] +fn add_path_no_workspace() -> Result<()> { + let context = TestContext::new("3.12"); + + let workspace = context.temp_dir.child("workspace"); + workspace.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + let child = workspace.child("packages").child("child"); + child.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#})?; + workspace + .child("packages") + .child("child") + .child("src") + .child("child") + .child("__init__.py") + .touch()?; + + uv_snapshot!(context.filters(), context.add().arg(Path::new("packages").join("child")).current_dir(workspace.path()).arg("--no-workspace"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + child==0.1.0 (from file://[TEMP_DIR]/workspace/packages/child) + "); + + let pyproject_toml = fs_err::read_to_string(workspace.join("pyproject.toml"))?; + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + pyproject_toml, @r#" [project] name = "parent" version = "0.1.0" @@ -2556,7 +2684,7 @@ fn add_path() -> Result<()> { [tool.uv.sources] child = { path = "packages/child" } - "### + "# ); }); @@ -2607,6 +2735,110 @@ fn add_path() -> Result<()> { Ok(()) } +/// Add a path 
dependency in an adjacent directory, which should not be added to the workspace. +#[test] +fn add_path_adjacent_directory() -> Result<()> { + let context = TestContext::new("3.12"); + + let project = context.temp_dir.child("project"); + project.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + let dependency = context.temp_dir.child("dependency"); + dependency.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "dependency" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#})?; + dependency + .child("src") + .child("dependency") + .child("__init__.py") + .touch()?; + + uv_snapshot!(context.filters(), context.add().arg(dependency.path()).current_dir(project.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dependency==0.1.0 (from file://[TEMP_DIR]/dependency) + "); + + let pyproject_toml = fs_err::read_to_string(project.join("pyproject.toml"))?; + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + pyproject_toml, @r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "dependency", + ] + + [tool.uv.sources] + dependency = { path = "../dependency" } + "# + ); + }); + + // `uv add` implies a full lock and sync, including development dependencies. 
+ let lock = fs_err::read_to_string(project.join("uv.lock"))?; + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "dependency" + version = "0.1.0" + source = { directory = "../dependency" } + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "dependency" }, + ] + + [package.metadata] + requires-dist = [{ name = "dependency", directory = "../dependency" }] + "# + ); + }); + + Ok(()) +} + /// Update a requirement, modifying the source and extras. #[test] #[cfg(feature = "git")] @@ -7249,7 +7481,7 @@ fn fail_to_add_revert_project() -> Result<()> { .child("setup.py") .write_str("1/0")?; - uv_snapshot!(context.filters(), context.add().arg("./child"), @r#" + uv_snapshot!(context.filters(), context.add().arg("./child").arg("--no-workspace"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7351,7 +7583,7 @@ fn fail_to_edit_revert_project() -> Result<()> { .child("setup.py") .write_str("1/0")?; - uv_snapshot!(context.filters(), context.add().arg("./child"), @r#" + uv_snapshot!(context.filters(), context.add().arg("./child").arg("--no-workspace"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7460,7 +7692,7 @@ fn fail_to_add_revert_workspace_root() -> Result<()> { .child("setup.py") .write_str("1/0")?; - uv_snapshot!(context.filters(), context.add().arg("--workspace").arg("./broken"), @r#" + uv_snapshot!(context.filters(), context.add().arg("./broken"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7575,7 +7807,7 @@ fn fail_to_add_revert_workspace_member() -> Result<()> { .child("setup.py") .write_str("1/0")?; - uv_snapshot!(context.filters(), context.add().current_dir(&project).arg("--workspace").arg("../broken"), @r#" + uv_snapshot!(context.filters(), 
context.add().current_dir(&project).arg("../broken"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -12928,12 +13160,12 @@ fn add_path_with_existing_workspace() -> Result<()> { dependencies = [] "#})?; - // Add the dependency with `--workspace` flag from the project directory. + // Add the dependency from the project directory. It should automatically be added as a + // workspace member, since it's in the same directory as the workspace. uv_snapshot!(context.filters(), context .add() .current_dir(&project_dir) - .arg("../dep") - .arg("--workspace"), @r" + .arg("../dep"), @r" success: true exit_code: 0 ----- stdout ----- @@ -13044,3 +13276,203 @@ fn add_path_with_workspace() -> Result<()> { Ok(()) } + +/// Add a path dependency within the workspace directory without --workspace flag. +/// It should automatically be added as a workspace member. +#[test] +fn add_path_within_workspace_defaults_to_workspace() -> Result<()> { + let context = TestContext::new("3.12"); + + let workspace_toml = context.temp_dir.child("pyproject.toml"); + workspace_toml.write_str(indoc! {r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = [] + "#})?; + + let dep_dir = context.temp_dir.child("dep"); + dep_dir.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "dep" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + // Add the dependency without --workspace flag - it should still be added as workspace member + // since it's within the workspace directory. 
+ uv_snapshot!(context.filters(), context + .add() + .arg("./dep"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Added `dep` to workspace members + Resolved 2 packages in [TIME] + Audited in [TIME] + "); + + let pyproject_toml = context.read("pyproject.toml"); + assert_snapshot!( + pyproject_toml, @r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "dep", + ] + + [tool.uv.workspace] + members = [ + "dep", + ] + + [tool.uv.sources] + dep = { workspace = true } + "# + ); + + Ok(()) +} + +/// Add a path dependency within the workspace directory with --no-workspace flag. +/// It should be added as a direct path dependency. +#[test] +fn add_path_with_no_workspace() -> Result<()> { + let context = TestContext::new("3.12"); + + let workspace_toml = context.temp_dir.child("pyproject.toml"); + workspace_toml.write_str(indoc! {r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = [] + "#})?; + + let dep_dir = context.temp_dir.child("dep"); + dep_dir.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "dep" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + // Add the dependency with --no-workspace flag - it should be added as direct path dependency. + uv_snapshot!(context.filters(), context + .add() + .arg("./dep") + .arg("--no-workspace"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Audited in [TIME] + "); + + let pyproject_toml = context.read("pyproject.toml"); + assert_snapshot!( + pyproject_toml, @r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "dep", + ] + + [tool.uv.workspace] + members = [] + + [tool.uv.sources] + dep = { path = "dep" } + "# + ); + + Ok(()) +} + +/// Add a path dependency outside the workspace directory. 
+/// It should be added as a direct path dependency, not a workspace member. +#[test] +fn add_path_outside_workspace_no_default() -> Result<()> { + let context = TestContext::new("3.12"); + + // Create a workspace directory + let workspace_dir = context.temp_dir.child("workspace"); + workspace_dir.create_dir_all()?; + + let workspace_toml = workspace_dir.child("pyproject.toml"); + workspace_toml.write_str(indoc! {r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = [] + "#})?; + + // Create a dependency outside the workspace + let dep_dir = context.temp_dir.child("external_dep"); + dep_dir.child("pyproject.toml").write_str(indoc! {r#" + [project] + name = "dep" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + // Add the dependency without --workspace flag - it should be a direct path dependency + // since it's outside the workspace directory. + uv_snapshot!(context.filters(), context + .add() + .current_dir(&workspace_dir) + .arg("../external_dep"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Resolved 2 packages in [TIME] + Audited in [TIME] + "); + + let pyproject_toml = fs_err::read_to_string(workspace_toml)?; + assert_snapshot!( + pyproject_toml, @r#" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "dep", + ] + + [tool.uv.workspace] + members = [] + + [tool.uv.sources] + dep = { path = "../external_dep" } + "# + ); + + Ok(()) +} diff --git a/docs/reference/cli.md b/docs/reference/cli.md index aa6213eff..881c96697 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -535,7 +535,9 @@ uv add [OPTIONS] >

    May also be set with the UV_NO_PROGRESS environment variable.

    --no-python-downloads

    Disable automatic downloads of Python.

    --no-sources

    Ignore the tool.uv.sources table when resolving dependencies. Used to lock against the standards-compliant, publishable package metadata, as opposed to using any workspace, Git, URL, or local path sources

    --no-sync

    Avoid syncing the virtual environment

    -

    May also be set with the UV_NO_SYNC environment variable.

    --offline

    Disable network access.

    +

    May also be set with the UV_NO_SYNC environment variable.

    --no-workspace

    Don't add the dependency as a workspace member.

    +

    By default, when adding a dependency that's a local path and is within the workspace directory, uv will add it as a workspace member; pass --no-workspace to add the package as direct path dependency instead.

    +
    --offline

    Disable network access.

    When disabled, uv will only use locally cached data and locally available files.

    May also be set with the UV_OFFLINE environment variable.

    --optional optional

    Add the requirements to the package's optional dependencies for the specified extra.

    The group may then be activated when installing the project with the --extra flag.

    @@ -583,7 +585,7 @@ uv add [OPTIONS] >
    --verbose, -v

    Use verbose output.

    You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

    --workspace

    Add the dependency as a workspace member.

    -

    When used with a path dependency, the package will be added to the workspace's members list in the root pyproject.toml file.

    +

    By default, uv will add path dependencies that are within the workspace directory as workspace members. When used with a path dependency, the package will be added to the workspace's members list in the root pyproject.toml file.

    ## uv remove @@ -1154,10 +1156,10 @@ environment in the project.

  • macos: An alias for aarch64-apple-darwin, the default target for macOS
  • x86_64-pc-windows-msvc: A 64-bit x86 Windows target
  • i686-pc-windows-msvc: A 32-bit x86 Windows target
  • -
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_17
  • +
  • x86_64-unknown-linux-gnu: An x86 Linux target. Equivalent to x86_64-manylinux_2_28
  • aarch64-apple-darwin: An ARM-based macOS target, as seen on Apple Silicon devices
  • x86_64-apple-darwin: An x86 macOS target
  • -
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_17
  • +
  • aarch64-unknown-linux-gnu: An ARM64 Linux target. Equivalent to aarch64-manylinux_2_28
  • aarch64-unknown-linux-musl: An ARM64 Linux target
  • x86_64-unknown-linux-musl: An x86_64 Linux target
  • x86_64-manylinux2014: An x86_64 target for the manylinux2014 platform. Equivalent to x86_64-manylinux_2_17
  • From dff9ced40ab2633d32f7e9bcdcb6484500caf621 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 10 Jul 2025 22:20:01 -0400 Subject: [PATCH 063/130] Support conflicting editable settings across groups (#14197) If a user specifies `-e /path/to/dir` and `/path/to/dir` in a `uv pip install` command, we want the editable to "win" (rather than erroring due to conflicting URLs). Unfortunately, this behavior meant that when you requested a package as editable and non-editable in conflicting groups, the editable version was _always_ used. This PR modifies the requisite types to use `Option` rather than `bool` for the `editable` field, so we can determine whether a requirement was explicitly requested as editable, explicitly requested as non-editable, or not specified (as in the case of `/path/to/dir` in a `requirements.txt` file). In the latter case, we allow editables to override the "unspecified" requirement. If a project includes a path dependency twice, once with `editable = true` and once without any `editable` annotation, those are now considered conflicting URLs, and lead to an error, so I've marked this change as breaking. Closes https://github.com/astral-sh/uv/issues/14139. 
--- crates/uv-distribution-types/src/buildable.rs | 7 +- crates/uv-distribution-types/src/lib.rs | 12 +- .../uv-distribution-types/src/requirement.rs | 32 +- .../src/index/built_wheel_index.rs | 2 +- .../uv-distribution/src/metadata/lowering.rs | 20 +- crates/uv-distribution/src/source/mod.rs | 4 +- crates/uv-installer/src/satisfies.rs | 2 +- crates/uv-pypi-types/src/parsed_url.rs | 29 +- crates/uv-requirements-txt/src/lib.rs | 6 +- crates/uv-requirements-txt/src/requirement.rs | 4 +- ...ts_txt__test__parse-unix-bare-url.txt.snap | 24 +- ...ts_txt__test__parse-unix-editable.txt.snap | 48 ++- ...txt__test__parse-windows-bare-url.txt.snap | 24 +- ...txt__test__parse-windows-editable.txt.snap | 48 ++- crates/uv-requirements/src/source_tree.rs | 2 +- .../src/lock/export/pylock_toml.rs | 8 +- crates/uv-resolver/src/lock/mod.rs | 20 +- crates/uv-resolver/src/resolver/mod.rs | 1 + crates/uv-resolver/src/resolver/urls.rs | 9 +- crates/uv-workspace/src/workspace.rs | 16 +- crates/uv/src/commands/project/sync.rs | 10 +- crates/uv/tests/it/lock.rs | 86 +---- crates/uv/tests/it/sync.rs | 332 ++++++++++++++++++ 23 files changed, 530 insertions(+), 216 deletions(-) diff --git a/crates/uv-distribution-types/src/buildable.rs b/crates/uv-distribution-types/src/buildable.rs index c97bb362f..75997e406 100644 --- a/crates/uv-distribution-types/src/buildable.rs +++ b/crates/uv-distribution-types/src/buildable.rs @@ -124,7 +124,10 @@ impl SourceUrl<'_> { pub fn is_editable(&self) -> bool { matches!( self, - Self::Directory(DirectorySourceUrl { editable: true, .. }) + Self::Directory(DirectorySourceUrl { + editable: Some(true), + .. 
+ }) ) } @@ -210,7 +213,7 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> { pub struct DirectorySourceUrl<'a> { pub url: &'a DisplaySafeUrl, pub install_path: Cow<'a, Path>, - pub editable: bool, + pub editable: Option, } impl std::fmt::Display for DirectorySourceUrl<'_> { diff --git a/crates/uv-distribution-types/src/lib.rs b/crates/uv-distribution-types/src/lib.rs index 1e3ad7eba..0b25669b0 100644 --- a/crates/uv-distribution-types/src/lib.rs +++ b/crates/uv-distribution-types/src/lib.rs @@ -343,9 +343,9 @@ pub struct DirectorySourceDist { /// The absolute path to the distribution which we use for installing. pub install_path: Box, /// Whether the package should be installed in editable mode. - pub editable: bool, + pub editable: Option, /// Whether the package should be built and installed. - pub r#virtual: bool, + pub r#virtual: Option, /// The URL as it was provided by the user. pub url: VerbatimUrl, } @@ -452,8 +452,8 @@ impl Dist { name: PackageName, url: VerbatimUrl, install_path: &Path, - editable: bool, - r#virtual: bool, + editable: Option, + r#virtual: Option, ) -> Result { // Convert to an absolute path. let install_path = path::absolute(install_path)?; @@ -655,7 +655,7 @@ impl SourceDist { /// Returns `true` if the distribution is editable. pub fn is_editable(&self) -> bool { match self { - Self::Directory(DirectorySourceDist { editable, .. }) => *editable, + Self::Directory(DirectorySourceDist { editable, .. }) => editable.unwrap_or(false), _ => false, } } @@ -663,7 +663,7 @@ impl SourceDist { /// Returns `true` if the distribution is virtual. pub fn is_virtual(&self) -> bool { match self { - Self::Directory(DirectorySourceDist { r#virtual, .. }) => *r#virtual, + Self::Directory(DirectorySourceDist { r#virtual, .. 
}) => r#virtual.unwrap_or(false), _ => false, } } diff --git a/crates/uv-distribution-types/src/requirement.rs b/crates/uv-distribution-types/src/requirement.rs index 432cc4e12..104cf396c 100644 --- a/crates/uv-distribution-types/src/requirement.rs +++ b/crates/uv-distribution-types/src/requirement.rs @@ -429,9 +429,9 @@ pub enum RequirementSource { /// The absolute path to the distribution which we use for installing. install_path: Box, /// For a source tree (a directory), whether to install as an editable. - editable: bool, + editable: Option, /// For a source tree (a directory), whether the project should be built and installed. - r#virtual: bool, + r#virtual: Option, /// The PEP 508 style URL in the format /// `file:///#subdirectory=`. url: VerbatimUrl, @@ -545,7 +545,13 @@ impl RequirementSource { /// Returns `true` if the source is editable. pub fn is_editable(&self) -> bool { - matches!(self, Self::Directory { editable: true, .. }) + matches!( + self, + Self::Directory { + editable: Some(true), + .. + } + ) } /// Returns `true` if the source is empty. 
@@ -792,11 +798,11 @@ impl From for RequirementSourceWire { r#virtual, url: _, } => { - if editable { + if editable.unwrap_or(false) { Self::Editable { editable: PortablePathBuf::from(install_path), } - } else if r#virtual { + } else if r#virtual.unwrap_or(false) { Self::Virtual { r#virtual: PortablePathBuf::from(install_path), } @@ -908,8 +914,8 @@ impl TryFrom for RequirementSource { ))?; Ok(Self::Directory { install_path: directory, - editable: false, - r#virtual: false, + editable: Some(false), + r#virtual: Some(false), url, }) } @@ -920,8 +926,8 @@ impl TryFrom for RequirementSource { ))?; Ok(Self::Directory { install_path: editable, - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual: Some(false), url, }) } @@ -932,8 +938,8 @@ impl TryFrom for RequirementSource { ))?; Ok(Self::Directory { install_path: r#virtual, - editable: false, - r#virtual: true, + editable: Some(false), + r#virtual: Some(true), url, }) } @@ -980,8 +986,8 @@ mod tests { marker: MarkerTree::TRUE, source: RequirementSource::Directory { install_path: PathBuf::from(path).into_boxed_path(), - editable: false, - r#virtual: false, + editable: Some(false), + r#virtual: Some(false), url: VerbatimUrl::from_absolute_path(path).unwrap(), }, origin: None, diff --git a/crates/uv-distribution/src/index/built_wheel_index.rs b/crates/uv-distribution/src/index/built_wheel_index.rs index fb376d1b4..9752e7e4f 100644 --- a/crates/uv-distribution/src/index/built_wheel_index.rs +++ b/crates/uv-distribution/src/index/built_wheel_index.rs @@ -119,7 +119,7 @@ impl<'a> BuiltWheelIndex<'a> { ) -> Result, Error> { let cache_shard = self.cache.shard( CacheBucket::SourceDistributions, - if source_dist.editable { + if source_dist.editable.unwrap_or(false) { WheelCache::Editable(&source_dist.url).root() } else { WheelCache::Path(&source_dist.url).root() diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index 330075842..54782c083 100644 --- 
a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -310,15 +310,15 @@ impl LoweredRequirement { RequirementSource::Directory { install_path: install_path.into_boxed_path(), url, - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual: Some(false), } } else { RequirementSource::Directory { install_path: install_path.into_boxed_path(), url, - editable: false, - r#virtual: true, + editable: Some(false), + r#virtual: Some(true), } }; (source, marker) @@ -724,8 +724,8 @@ fn path_source( Ok(RequirementSource::Directory { install_path: install_path.into_boxed_path(), url, - editable: true, - r#virtual: false, + editable, + r#virtual: Some(false), }) } else { // Determine whether the project is a package or virtual. @@ -738,12 +738,14 @@ fn path_source( .unwrap_or(true) }); + // If the project is not a package, treat it as a virtual dependency. + let r#virtual = !is_package; + Ok(RequirementSource::Directory { install_path: install_path.into_boxed_path(), url, - editable: false, - // If a project is not a package, treat it as a virtual dependency. 
- r#virtual: !is_package, + editable: Some(false), + r#virtual: Some(r#virtual), }) } } else { diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index 92d83e6ce..1308e3d77 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -1060,7 +1060,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let cache_shard = self.build_context.cache().shard( CacheBucket::SourceDistributions, - if resource.editable { + if resource.editable.unwrap_or(false) { WheelCache::Editable(resource.url).root() } else { WheelCache::Path(resource.url).root() @@ -1173,7 +1173,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let cache_shard = self.build_context.cache().shard( CacheBucket::SourceDistributions, - if resource.editable { + if resource.editable.unwrap_or(false) { WheelCache::Editable(resource.url).root() } else { WheelCache::Path(resource.url).root() diff --git a/crates/uv-installer/src/satisfies.rs b/crates/uv-installer/src/satisfies.rs index a91676595..b7e824202 100644 --- a/crates/uv-installer/src/satisfies.rs +++ b/crates/uv-installer/src/satisfies.rs @@ -241,7 +241,7 @@ impl RequirementSatisfaction { return Self::Mismatch; }; - if *requested_editable != installed_editable.unwrap_or_default() { + if requested_editable != installed_editable { trace!( "Editable mismatch: {:?} vs. 
{:?}", *requested_editable, diff --git a/crates/uv-pypi-types/src/parsed_url.rs b/crates/uv-pypi-types/src/parsed_url.rs index 9517dfdc6..57afbcdf9 100644 --- a/crates/uv-pypi-types/src/parsed_url.rs +++ b/crates/uv-pypi-types/src/parsed_url.rs @@ -86,8 +86,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl { ParsedUrl::Directory(ParsedDirectoryUrl { url, install_path, - editable: false, - r#virtual: false, + editable: None, + r#virtual: None, }) } else { ParsedUrl::Path(ParsedPathUrl { @@ -118,8 +118,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl { ParsedUrl::Directory(ParsedDirectoryUrl { url, install_path, - editable: false, - r#virtual: false, + editable: None, + r#virtual: None, }) } else { ParsedUrl::Path(ParsedPathUrl { @@ -187,7 +187,10 @@ impl ParsedUrl { pub fn is_editable(&self) -> bool { matches!( self, - Self::Directory(ParsedDirectoryUrl { editable: true, .. }) + Self::Directory(ParsedDirectoryUrl { + editable: Some(true), + .. + }) ) } } @@ -226,16 +229,18 @@ pub struct ParsedDirectoryUrl { pub url: DisplaySafeUrl, /// The absolute path to the distribution which we use for installing. pub install_path: Box, - pub editable: bool, - pub r#virtual: bool, + /// Whether the project at the given URL should be installed in editable mode. + pub editable: Option, + /// Whether the project at the given URL should be treated as a virtual package. + pub r#virtual: Option, } impl ParsedDirectoryUrl { /// Construct a [`ParsedDirectoryUrl`] from a path requirement source. 
pub fn from_source( install_path: Box, - editable: bool, - r#virtual: bool, + editable: Option, + r#virtual: Option, url: DisplaySafeUrl, ) -> Self { Self { @@ -399,8 +404,8 @@ impl TryFrom for ParsedUrl { Ok(Self::Directory(ParsedDirectoryUrl { url, install_path: path.into_boxed_path(), - editable: false, - r#virtual: false, + editable: None, + r#virtual: None, })) } else { Ok(Self::Path(ParsedPathUrl { @@ -445,7 +450,7 @@ impl From<&ParsedDirectoryUrl> for DirectUrl { Self::LocalDirectory { url: value.url.to_string(), dir_info: DirInfo { - editable: value.editable.then_some(true), + editable: value.editable, }, subdirectory: None, } diff --git a/crates/uv-requirements-txt/src/lib.rs b/crates/uv-requirements-txt/src/lib.rs index b734bf8a2..b95875768 100644 --- a/crates/uv-requirements-txt/src/lib.rs +++ b/crates/uv-requirements-txt/src/lib.rs @@ -2064,8 +2064,10 @@ mod test { fragment: None, }, install_path: "/foo/bar", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { diff --git a/crates/uv-requirements-txt/src/requirement.rs b/crates/uv-requirements-txt/src/requirement.rs index 285753ed8..6c7cf0b52 100644 --- a/crates/uv-requirements-txt/src/requirement.rs +++ b/crates/uv-requirements-txt/src/requirement.rs @@ -90,7 +90,7 @@ impl RequirementsTxtRequirement { version_or_url: Some(uv_pep508::VersionOrUrl::Url(VerbatimParsedUrl { verbatim: url.verbatim, parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl { - editable: true, + editable: Some(true), ..parsed_url }), })), @@ -115,7 +115,7 @@ impl RequirementsTxtRequirement { url: VerbatimParsedUrl { verbatim: requirement.url.verbatim, parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl { - editable: true, + editable: Some(true), ..parsed_url }), }, diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap 
index f2187a1a2..dd03d09bf 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap @@ -22,8 +22,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -72,8 +72,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -126,8 +126,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -176,8 +176,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -226,8 +226,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -276,8 +276,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap index 222ab6b10..39a4885dc 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap @@ -24,8 +24,10 @@ RequirementsTxt { fragment: None, }, 
install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -81,8 +83,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -138,8 +142,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -195,8 +201,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -252,8 +260,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -302,8 +312,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable[d", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -352,8 +364,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -402,8 +416,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap index 72e1c8635..be90c5c44 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap @@ 
-22,8 +22,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -72,8 +72,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -126,8 +126,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black_editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -176,8 +176,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -226,8 +226,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { @@ -276,8 +276,8 @@ RequirementsTxt { fragment: None, }, install_path: "/scripts/packages/black editable", - editable: false, - virtual: false, + editable: None, + virtual: None, }, ), verbatim: VerbatimUrl { diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap index 84ae22816..dde16b40c 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap @@ -24,8 +24,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -81,8 +83,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: 
true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -138,8 +142,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -195,8 +201,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -252,8 +260,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -302,8 +312,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable[d", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -352,8 +364,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { @@ -402,8 +416,10 @@ RequirementsTxt { fragment: None, }, install_path: "/editable", - editable: true, - virtual: false, + editable: Some( + true, + ), + virtual: None, }, ), verbatim: VerbatimUrl { diff --git a/crates/uv-requirements/src/source_tree.rs b/crates/uv-requirements/src/source_tree.rs index 39fbe453b..a7a99c5a2 100644 --- a/crates/uv-requirements/src/source_tree.rs +++ b/crates/uv-requirements/src/source_tree.rs @@ -154,7 +154,7 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { let source = SourceUrl::Directory(DirectorySourceUrl { url: &url, install_path: Cow::Borrowed(source_tree), - editable: false, + editable: None, }); // Determine the hash policy. 
Since we don't have a package name, we perform a diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index 8a53fd8f7..80cd54be2 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -500,7 +500,7 @@ impl<'lock> PylockToml { .unwrap_or_else(|_| dist.install_path.clone()); package.directory = Some(PylockTomlDirectory { path: PortablePathBuf::from(path), - editable: if dist.editable { Some(true) } else { None }, + editable: dist.editable, subdirectory: None, }); } @@ -737,7 +737,7 @@ impl<'lock> PylockToml { ), editable: match editable { EditableMode::NonEditable => None, - EditableMode::Editable => Some(sdist.editable), + EditableMode::Editable => sdist.editable, }, subdirectory: None, }), @@ -1394,8 +1394,8 @@ impl PylockTomlDirectory { Ok(DirectorySourceDist { name: name.clone(), install_path: path.into_boxed_path(), - editable: self.editable.unwrap_or(false), - r#virtual: false, + editable: self.editable, + r#virtual: Some(false), url, }) } diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 7ca100fd8..7cbac67df 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -2396,8 +2396,8 @@ impl Package { name: self.id.name.clone(), url: verbatim_url(&install_path, &self.id)?, install_path: install_path.into_boxed_path(), - editable: false, - r#virtual: false, + editable: Some(false), + r#virtual: Some(false), }; uv_distribution_types::SourceDist::Directory(dir_dist) } @@ -2407,8 +2407,8 @@ impl Package { name: self.id.name.clone(), url: verbatim_url(&install_path, &self.id)?, install_path: install_path.into_boxed_path(), - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual: Some(false), }; uv_distribution_types::SourceDist::Directory(dir_dist) } @@ -2418,8 +2418,8 @@ impl Package { name: self.id.name.clone(), url: verbatim_url(&install_path, 
&self.id)?, install_path: install_path.into_boxed_path(), - editable: false, - r#virtual: true, + editable: Some(false), + r#virtual: Some(true), }; uv_distribution_types::SourceDist::Directory(dir_dist) } @@ -3250,9 +3250,9 @@ impl Source { let path = relative_to(&directory_dist.install_path, root) .or_else(|_| std::path::absolute(&directory_dist.install_path)) .map_err(LockErrorKind::DistributionRelativePath)?; - if directory_dist.editable { + if directory_dist.editable.unwrap_or(false) { Ok(Source::Editable(path.into_boxed_path())) - } else if directory_dist.r#virtual { + } else if directory_dist.r#virtual.unwrap_or(false) { Ok(Source::Virtual(path.into_boxed_path())) } else { Ok(Source::Directory(path.into_boxed_path())) @@ -4800,8 +4800,8 @@ fn normalize_requirement( marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Directory { install_path, - editable, - r#virtual, + editable: Some(editable.unwrap_or(false)), + r#virtual: Some(r#virtual.unwrap_or(false)), url, }, origin: None, diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index 32d684f04..c30c4e947 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -620,6 +620,7 @@ impl ResolverState { // Then here, if we get a reason that we consider unrecoverable, we should diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs index 73d190b4a..57803ed0b 100644 --- a/crates/uv-resolver/src/resolver/urls.rs +++ b/crates/uv-resolver/src/resolver/urls.rs @@ -63,9 +63,9 @@ impl Urls { verbatim: _, } = package_url { - if !*editable { + if editable.is_none() { debug!("Allowing an editable variant of {}", &package_url.verbatim); - *editable = true; + *editable = Some(true); } } } @@ -201,8 +201,9 @@ fn same_resource(a: &ParsedUrl, b: &ParsedUrl, git: &GitResolver) -> bool { || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) } 
(ParsedUrl::Directory(a), ParsedUrl::Directory(b)) => { - a.install_path == b.install_path - || is_same_file(&a.install_path, &b.install_path).unwrap_or(false) + (a.install_path == b.install_path + || is_same_file(&a.install_path, &b.install_path).unwrap_or(false)) + && a.editable.is_none_or(|a| b.editable.is_none_or(|b| a == b)) } _ => false, } diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index 1349d739c..8d09554d9 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -315,15 +315,15 @@ impl Workspace { source: if member.pyproject_toml.is_package() { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual: Some(false), url, } } else { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), - editable: false, - r#virtual: true, + editable: Some(false), + r#virtual: Some(true), url, } }, @@ -371,15 +371,15 @@ impl Workspace { source: if member.pyproject_toml.is_package() { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual: Some(false), url, } } else { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), - editable: false, - r#virtual: true, + editable: Some(false), + r#virtual: Some(true), url, } }, diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 94586004f..5843df6be 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -747,7 +747,7 @@ fn apply_no_virtual_project(resolution: Resolution) -> Resolution { return true; }; - !dist.r#virtual + !dist.r#virtual.unwrap_or(false) }) } @@ -765,8 +765,8 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu let 
Dist::Source(SourceDist::Directory(DirectorySourceDist { name, install_path, - editable: true, - r#virtual: false, + editable: Some(true), + r#virtual, url, })) = dist.as_ref() else { @@ -777,8 +777,8 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu dist: Arc::new(Dist::Source(SourceDist::Directory(DirectorySourceDist { name: name.clone(), install_path: install_path.clone(), - editable: false, - r#virtual: false, + editable: Some(false), + r#virtual: *r#virtual, url: url.clone(), }))), version: version.clone(), diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index f91870762..477b4b039 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -10946,7 +10946,7 @@ fn lock_sources_source_tree() -> Result<()> { } /// Lock a project in which a given dependency is requested from two different members, once as -/// editable, and once as non-editable. +/// editable, and once as non-editable. This should trigger a conflicting URL error. 
#[test] fn lock_editable() -> Result<()> { let context = TestContext::new("3.12"); @@ -11086,86 +11086,16 @@ fn lock_editable() -> Result<()> { library = { path = "../../library", editable = true } "#})?; - uv_snapshot!(context.filters(), context.lock(), @r###" - success: true - exit_code: 0 + uv_snapshot!(context.filters(), context.lock(), @r" + success: false + exit_code: 2 ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - - let lock = context.read("uv.lock"); - - insta::with_settings!({ - filters => context.filters(), - }, { - assert_snapshot!( - lock, @r#" - version = 1 - revision = 2 - requires-python = ">=3.12" - - [options] - exclude-newer = "2024-03-25T00:00:00Z" - - [manifest] - members = [ - "leaf", - "workspace", - ] - - [[package]] - name = "leaf" - version = "0.1.0" - source = { editable = "packages/leaf" } - dependencies = [ - { name = "library" }, - ] - - [package.metadata] - requires-dist = [{ name = "library", editable = "library" }] - - [[package]] - name = "library" - version = "0.1.0" - source = { editable = "library" } - - [[package]] - name = "workspace" - version = "0.1.0" - source = { virtual = "." } - dependencies = [ - { name = "library" }, - ] - - [package.metadata] - requires-dist = [{ name = "library", directory = "library" }] - "# - ); - }); - - // Re-run with `--locked`. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - - // Install from the lockfile. 
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Prepared 1 package in [TIME] - Installed 1 package in [TIME] - + library==0.1.0 (from file://[TEMP_DIR]/library) - "###); + error: Requirements contain conflicting URLs for package `library` in all marker environments: + - file://[TEMP_DIR]/library + - file://[TEMP_DIR]/library (editable) + "); Ok(()) } diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 9fecd50b0..0165cc7f6 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -10471,3 +10471,335 @@ fn sync_python_platform() -> Result<()> { Ok(()) } + +/// See: +#[test] +#[cfg(not(windows))] +fn conflicting_editable() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + [dependency-groups] + foo = [ + "child", + ] + bar = [ + "child", + ] + [tool.uv] + conflicts = [ + [ + { group = "foo" }, + { group = "bar" }, + ], + ] + [tool.uv.sources] + child = [ + { path = "./child", editable = true, group = "foo" }, + { path = "./child", editable = false, group = "bar" }, + ] + "#, + )?; + + context + .temp_dir + .child("child") + .child("pyproject.toml") + .write_str( + r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#, + )?; + context + .temp_dir + .child("child") + .child("src") + .child("child") + .child("__init__.py") + .touch()?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Audited in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + 
filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + conflicts = [[ + { package = "project", group = "bar" }, + { package = "project", group = "foo" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "child" + version = "0.1.0" + source = { directory = "child" } + + [[package]] + name = "child" + version = "0.1.0" + source = { editable = "child" } + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + + [package.dev-dependencies] + bar = [ + { name = "child", version = "0.1.0", source = { directory = "child" } }, + ] + foo = [ + { name = "child", version = "0.1.0", source = { editable = "child" } }, + ] + + [package.metadata] + + [package.metadata.requires-dev] + bar = [{ name = "child", directory = "child" }] + foo = [{ name = "child", editable = "child" }] + "# + ); + }); + + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + child==0.1.0 (from file://[TEMP_DIR]/child) + "); + + uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + [{"name":"child","version":"0.1.0","editable_project_location":"[TEMP_DIR]/child"}] + + ----- stderr ----- + "#); + + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 1 package in [TIME] + Uninstalled 1 package in [TIME] + Installed 1 package in [TIME] + ~ child==0.1.0 (from file://[TEMP_DIR]/child) + "); + + uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + 
[{"name":"child","version":"0.1.0"}] + + ----- stderr ----- + "#); + + Ok(()) +} + +/// See: +#[test] +#[cfg(not(windows))] +fn undeclared_editable() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + [dependency-groups] + foo = [ + "child", + ] + bar = [ + "child", + ] + [tool.uv] + conflicts = [ + [ + { group = "foo" }, + { group = "bar" }, + ], + ] + [tool.uv.sources] + child = [ + { path = "./child", editable = true, group = "foo" }, + { path = "./child", group = "bar" }, + ] + "#, + )?; + + context + .temp_dir + .child("child") + .child("pyproject.toml") + .write_str( + r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#, + )?; + context + .temp_dir + .child("child") + .child("src") + .child("child") + .child("__init__.py") + .touch()?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Audited in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + conflicts = [[ + { package = "project", group = "bar" }, + { package = "project", group = "foo" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "child" + version = "0.1.0" + source = { directory = "child" } + + [[package]] + name = "child" + version = "0.1.0" + source = { editable = "child" } + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." 
} + + [package.dev-dependencies] + bar = [ + { name = "child", version = "0.1.0", source = { directory = "child" } }, + ] + foo = [ + { name = "child", version = "0.1.0", source = { editable = "child" } }, + ] + + [package.metadata] + + [package.metadata.requires-dev] + bar = [{ name = "child", directory = "child" }] + foo = [{ name = "child", editable = "child" }] + "# + ); + }); + + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + child==0.1.0 (from file://[TEMP_DIR]/child) + "); + + uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + [{"name":"child","version":"0.1.0","editable_project_location":"[TEMP_DIR]/child"}] + + ----- stderr ----- + "#); + + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("bar"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 1 package in [TIME] + Uninstalled 1 package in [TIME] + Installed 1 package in [TIME] + ~ child==0.1.0 (from file://[TEMP_DIR]/child) + "); + + uv_snapshot!(context.filters(), context.pip_list().arg("--format").arg("json"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + [{"name":"child","version":"0.1.0"}] + + ----- stderr ----- + "#); + + Ok(()) +} From dbaec0537ae5cfd5a55a4fbf17d93dbe9bef04b5 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 11 Jul 2025 07:47:06 -0500 Subject: [PATCH 064/130] Tear miette out of the `uv venv` command (#14546) This has some changes to the user-facing output, but makes it more consistent with the rest of uv. 
--- crates/uv/src/commands/venv.rs | 122 ++++++------------------------ crates/uv/tests/it/pip_compile.rs | 4 +- crates/uv/tests/it/venv.rs | 104 +++++++++---------------- 3 files changed, 62 insertions(+), 168 deletions(-) diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index 6d6e15758..02bc818f8 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -4,9 +4,7 @@ use std::str::FromStr; use std::sync::Arc; use std::vec; -use anstream::eprint; use anyhow::Result; -use miette::{Diagnostic, IntoDiagnostic}; use owo_colors::OwoColorize; use thiserror::Error; @@ -42,6 +40,21 @@ use crate::settings::NetworkSettings; use super::project::default_dependency_groups; +#[derive(Error, Debug)] +enum VenvError { + #[error("Failed to create virtual environment")] + Creation(#[source] uv_virtualenv::Error), + + #[error("Failed to install seed packages into virtual environment")] + Seed(#[source] AnyErrorBuild), + + #[error("Failed to extract interpreter tags for installing seed packages")] + Tags(#[source] uv_platform_tags::TagsError), + + #[error("Failed to resolve `--find-links` entry")] + FlatIndex(#[source] uv_client::FlatIndexError), +} + /// Create a virtual environment. 
#[allow(clippy::unnecessary_wraps, clippy::fn_params_excessive_bools)] pub(crate) async fn venv( @@ -70,89 +83,6 @@ pub(crate) async fn venv( relocatable: bool, preview: PreviewMode, ) -> Result { - match venv_impl( - project_dir, - path, - python_request, - install_mirrors, - link_mode, - index_locations, - index_strategy, - dependency_metadata, - keyring_provider, - network_settings, - prompt, - system_site_packages, - seed, - python_preference, - python_downloads, - allow_existing, - exclude_newer, - concurrency, - no_config, - no_project, - cache, - printer, - relocatable, - preview, - ) - .await - { - Ok(status) => Ok(status), - Err(err) => { - eprint!("{err:?}"); - Ok(ExitStatus::Failure) - } - } -} - -#[derive(Error, Debug, Diagnostic)] -enum VenvError { - #[error("Failed to create virtualenv")] - #[diagnostic(code(uv::venv::creation))] - Creation(#[source] uv_virtualenv::Error), - - #[error("Failed to install seed packages")] - #[diagnostic(code(uv::venv::seed))] - Seed(#[source] AnyErrorBuild), - - #[error("Failed to extract interpreter tags")] - #[diagnostic(code(uv::venv::tags))] - Tags(#[source] uv_platform_tags::TagsError), - - #[error("Failed to resolve `--find-links` entry")] - #[diagnostic(code(uv::venv::flat_index))] - FlatIndex(#[source] uv_client::FlatIndexError), -} - -/// Create a virtual environment. 
-#[allow(clippy::fn_params_excessive_bools)] -async fn venv_impl( - project_dir: &Path, - path: Option, - python_request: Option, - install_mirrors: PythonInstallMirrors, - link_mode: LinkMode, - index_locations: &IndexLocations, - index_strategy: IndexStrategy, - dependency_metadata: DependencyMetadata, - keyring_provider: KeyringProviderType, - network_settings: &NetworkSettings, - prompt: uv_virtualenv::Prompt, - system_site_packages: bool, - seed: bool, - python_preference: PythonPreference, - python_downloads: PythonDownloads, - allow_existing: bool, - exclude_newer: Option, - concurrency: Concurrency, - no_config: bool, - no_project: bool, - cache: &Cache, - printer: Printer, - relocatable: bool, - preview: PreviewMode, -) -> miette::Result { let workspace_cache = WorkspaceCache::default(); let project = if no_project { None @@ -206,7 +136,7 @@ async fn venv_impl( // If the default dependency-groups demand a higher requires-python // we should bias an empty venv to that to avoid churn. 
let default_groups = match &project { - Some(project) => default_dependency_groups(project.pyproject_toml()).into_diagnostic()?, + Some(project) => default_dependency_groups(project.pyproject_toml())?, None => DefaultGroups::default(), }; let groups = DependencyGroups::default().with_defaults(default_groups); @@ -221,8 +151,7 @@ async fn venv_impl( project_dir, no_config, ) - .await - .into_diagnostic()?; + .await?; // Locate the Python interpreter to use in the environment let interpreter = { @@ -239,9 +168,8 @@ async fn venv_impl( install_mirrors.python_downloads_json_url.as_deref(), preview, ) - .await - .into_diagnostic()?; - report_interpreter(&python, false, printer).into_diagnostic()?; + .await?; + report_interpreter(&python, false, printer)?; python.into_interpreter() }; @@ -268,8 +196,7 @@ async fn venv_impl( "Creating virtual environment {}at: {}", if seed { "with seed packages " } else { "" }, path.user_display().cyan() - ) - .into_diagnostic()?; + )?; let upgradeable = preview.is_enabled() && python_request @@ -307,8 +234,7 @@ async fn venv_impl( } // Instantiate a client. - let client = RegistryClientBuilder::try_from(client_builder) - .into_diagnostic()? + let client = RegistryClientBuilder::try_from(client_builder)? .cache(cache.clone()) .index_locations(index_locations) .index_strategy(index_strategy) @@ -400,9 +326,7 @@ async fn venv_impl( .map_err(|err| VenvError::Seed(err.into()))?; let changelog = Changelog::from_installed(installed); - DefaultInstallLogger - .on_complete(&changelog, printer) - .into_diagnostic()?; + DefaultInstallLogger.on_complete(&changelog, printer)?; } // Determine the appropriate activation command. 
@@ -431,7 +355,7 @@ async fn venv_impl( Some(Shell::Cmd) => Some(shlex_windows(venv.scripts().join("activate"), Shell::Cmd)), }; if let Some(act) = activation { - writeln!(printer.stderr(), "Activate with: {}", act.green()).into_diagnostic()?; + writeln!(printer.stderr(), "Activate with: {}", act.green())?; } Ok(ExitStatus::Success) diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index f04c16b86..ac3549874 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -17411,11 +17411,11 @@ fn compile_broken_active_venv() -> Result<()> { .arg(&broken_system_python) .arg("venv2"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × No interpreter found at path `python3.14159` + error: No interpreter found at path `python3.14159` "); // Simulate a removed Python interpreter diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs index 52291c05d..43cacb640 100644 --- a/crates/uv/tests/it/venv.rs +++ b/crates/uv/tests/it/venv.rs @@ -656,13 +656,13 @@ fn create_venv_respects_group_requires_python() -> Result<()> { uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Found conflicting Python requirements: - │ - foo: <3.12 - │ - foo:dev: >=3.12 + error: Found conflicting Python requirements: + - foo: <3.12 + - foo:dev: >=3.12 " ); @@ -808,7 +808,7 @@ fn seed_older_python_version() { #[test] fn create_venv_unknown_python_minor() { - let context = TestContext::new_with_versions(&["3.12"]); + let context = TestContext::new_with_versions(&["3.12"]).with_filtered_python_sources(); let mut command = context.venv(); command @@ -819,34 +819,22 @@ fn create_venv_unknown_python_minor() { // Unset this variable to force what the user would see .env_remove(EnvVars::UV_TEST_PYTHON_PATH); - if cfg!(windows) { - uv_snapshot!(&mut command, @r###" - success: false 
- exit_code: 1 - ----- stdout ----- + uv_snapshot!(context.filters(), &mut command, @r" + success: false + exit_code: 2 + ----- stdout ----- - ----- stderr ----- - × No interpreter found for Python 3.100 in managed installations, search path, or registry - "### - ); - } else { - uv_snapshot!(&mut command, @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No interpreter found for Python 3.100 in managed installations or search path - "### - ); - } + ----- stderr ----- + error: No interpreter found for Python 3.100 in [PYTHON SOURCES] + " + ); context.venv.assert(predicates::path::missing()); } #[test] fn create_venv_unknown_python_patch() { - let context = TestContext::new_with_versions(&["3.12"]); + let context = TestContext::new_with_versions(&["3.12"]).with_filtered_python_sources(); let mut command = context.venv(); command @@ -857,27 +845,15 @@ fn create_venv_unknown_python_patch() { // Unset this variable to force what the user would see .env_remove(EnvVars::UV_TEST_PYTHON_PATH); - if cfg!(windows) { - uv_snapshot!(&mut command, @r###" - success: false - exit_code: 1 - ----- stdout ----- + uv_snapshot!(context.filters(), &mut command, @r" + success: false + exit_code: 2 + ----- stdout ----- - ----- stderr ----- - × No interpreter found for Python 3.12.100 in managed installations, search path, or registry - "### - ); - } else { - uv_snapshot!(&mut command, @r" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No interpreter found for Python 3.12.100 in managed installations or search path - " - ); - } + ----- stderr ----- + error: No interpreter found for Python 3.12.[X] in [PYTHON SOURCES] + " + ); context.venv.assert(predicates::path::missing()); } @@ -915,19 +891,17 @@ fn file_exists() -> Result<()> { uv_snapshot!(context.filters(), context.venv() .arg(context.venv.as_os_str()) .arg("--python") - .arg("3.12"), @r###" + .arg("3.12"), @r" success: false - exit_code: 1 + exit_code: 2 ----- 
stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv - uv::venv::creation - - × Failed to create virtualenv - ╰─▶ File exists at `.venv` - "### + error: Failed to create virtual environment + Caused by: File exists at `.venv` + " ); Ok(()) @@ -970,19 +944,17 @@ fn non_empty_dir_exists() -> Result<()> { uv_snapshot!(context.filters(), context.venv() .arg(context.venv.as_os_str()) .arg("--python") - .arg("3.12"), @r###" + .arg("3.12"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv - uv::venv::creation - - × Failed to create virtualenv - ╰─▶ The directory `.venv` exists, but it's not a virtual environment - "### + error: Failed to create virtual environment + Caused by: The directory `.venv` exists, but it's not a virtual environment + " ); Ok(()) @@ -1000,19 +972,17 @@ fn non_empty_dir_exists_allow_existing() -> Result<()> { uv_snapshot!(context.filters(), context.venv() .arg(context.venv.as_os_str()) .arg("--python") - .arg("3.12"), @r###" + .arg("3.12"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv - uv::venv::creation - - × Failed to create virtualenv - ╰─▶ The directory `.venv` exists, but it's not a virtual environment - "### + error: Failed to create virtual environment + Caused by: The directory `.venv` exists, but it's not a virtual environment + " ); uv_snapshot!(context.filters(), context.venv() From 9cf78217413fe6bee28fbe44e67689802c7b7486 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Fri, 11 Jul 2025 13:01:41 -0400 Subject: [PATCH 065/130] Add missing validations for disallowed `uv.toml` fields (#14322) We weren't following our usual "destructure all the options" pattern in this function, and several "this isn't 
actually read from uv.toml" fields slipped through the cracks over time since folks forgot it existed. Fixes part of #14308, although we could still try to make the warning in FilesystemOptions more accurate? You could argue this is a breaking change, but I think it ultimately isn't really, because we were already silently ignoring these fields. Now we properly error. --- crates/uv-settings/src/lib.rs | 50 ++++++++++++++++++++++++++---- crates/uv-settings/src/settings.rs | 2 +- 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs index cad600cfc..84aef8f28 100644 --- a/crates/uv-settings/src/lib.rs +++ b/crates/uv-settings/src/lib.rs @@ -201,33 +201,71 @@ fn read_file(path: &Path) -> Result { /// Validate that an [`Options`] schema is compatible with `uv.toml`. fn validate_uv_toml(path: &Path, options: &Options) -> Result<(), Error> { + let Options { + globals: _, + top_level: _, + install_mirrors: _, + publish: _, + add: _, + pip: _, + cache_keys: _, + override_dependencies: _, + constraint_dependencies: _, + build_constraint_dependencies: _, + environments: _, + required_environments: _, + conflicts, + workspace, + sources, + dev_dependencies, + default_groups, + dependency_groups, + managed, + package, + build_backend, + } = options; // The `uv.toml` format is not allowed to include any of the following, which are // permitted by the schema since they _can_ be included in `pyproject.toml` files // (and we want to use `deny_unknown_fields`). 
- if options.workspace.is_some() { + if conflicts.is_some() { + return Err(Error::PyprojectOnlyField(path.to_path_buf(), "conflicts")); + } + if workspace.is_some() { return Err(Error::PyprojectOnlyField(path.to_path_buf(), "workspace")); } - if options.sources.is_some() { + if sources.is_some() { return Err(Error::PyprojectOnlyField(path.to_path_buf(), "sources")); } - if options.dev_dependencies.is_some() { + if dev_dependencies.is_some() { return Err(Error::PyprojectOnlyField( path.to_path_buf(), "dev-dependencies", )); } - if options.default_groups.is_some() { + if default_groups.is_some() { return Err(Error::PyprojectOnlyField( path.to_path_buf(), "default-groups", )); } - if options.managed.is_some() { + if dependency_groups.is_some() { + return Err(Error::PyprojectOnlyField( + path.to_path_buf(), + "dependency-groups", + )); + } + if managed.is_some() { return Err(Error::PyprojectOnlyField(path.to_path_buf(), "managed")); } - if options.package.is_some() { + if package.is_some() { return Err(Error::PyprojectOnlyField(path.to_path_buf(), "package")); } + if build_backend.is_some() { + return Err(Error::PyprojectOnlyField( + path.to_path_buf(), + "build-backend", + )); + } Ok(()) } diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index d80ccce2f..e057cb40a 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -103,7 +103,7 @@ pub struct Options { cache-keys = [{ file = "pyproject.toml" }, { file = "requirements.txt" }, { git = { commit = true } }] "# )] - cache_keys: Option>, + pub cache_keys: Option>, // NOTE(charlie): These fields are shared with `ToolUv` in // `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct. 
From 6df7dab2df6e5a9b3bf36183851dd9d7c0824c9f Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 14 Jul 2025 13:18:39 -0400 Subject: [PATCH 066/130] Use an ephemeral environment for `uv run --with` invocations (#14447) This PR creates separation between the `--with` environment and the environment we actually run in, which in turn solves issues like https://github.com/astral-sh/uv/issues/12889 whereby two invocations share the same `--with` environment, causing them to collide by way of sharing an overlay. Closes https://github.com/astral-sh/uv/issues/7643. --- crates/uv-python/src/environment.rs | 2 +- crates/uv/src/commands/project/environment.rs | 166 ++++++++---------- crates/uv/src/commands/project/mod.rs | 3 - crates/uv/src/commands/project/run.rs | 116 ++++++++---- crates/uv/src/commands/tool/run.rs | 4 - crates/uv/tests/it/lock.rs | 2 +- crates/uv/tests/it/run.rs | 13 +- 7 files changed, 159 insertions(+), 147 deletions(-) diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs index 02f9fd683..07f3ddb54 100644 --- a/crates/uv-python/src/environment.rs +++ b/crates/uv-python/src/environment.rs @@ -174,7 +174,7 @@ impl PythonEnvironment { /// N.B. This function also works for system Python environments and users depend on this. pub fn from_root(root: impl AsRef, cache: &Cache) -> Result { debug!( - "Checking for Python environment at `{}`", + "Checking for Python environment at: `{}`", root.as_ref().user_display() ); match root.as_ref().try_exists() { diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index a3cda28c1..cf1add99a 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -17,6 +17,69 @@ use crate::commands::project::{ use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; +/// An ephemeral [`PythonEnvironment`] for running an individual command. 
+#[derive(Debug)] +pub(crate) struct EphemeralEnvironment(PythonEnvironment); + +impl From<PythonEnvironment> for EphemeralEnvironment { + fn from(environment: PythonEnvironment) -> Self { + Self(environment) + } +} + +impl From<EphemeralEnvironment> for PythonEnvironment { + fn from(environment: EphemeralEnvironment) -> Self { + environment.0 + } +} + +impl EphemeralEnvironment { + /// Set the ephemeral overlay for a Python environment. + #[allow(clippy::result_large_err)] + pub(crate) fn set_overlay(&self, contents: impl AsRef<[u8]>) -> Result<(), ProjectError> { + let site_packages = self + .0 + .site_packages() + .next() + .ok_or(ProjectError::NoSitePackages)?; + let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth"); + fs_err::write(overlay_path, contents)?; + Ok(()) + } + + /// Enable system site packages for a Python environment. + #[allow(clippy::result_large_err)] + pub(crate) fn set_system_site_packages(&self) -> Result<(), ProjectError> { + self.0 + .set_pyvenv_cfg("include-system-site-packages", "true")?; + Ok(()) + } + + /// Set the `extends-environment` key in the `pyvenv.cfg` file to the given path. + /// + /// Ephemeral environments created by `uv run --with` extend a parent (virtual or system) + /// environment by adding a `.pth` file to the ephemeral environment's `site-packages` + /// directory. The `pth` file contains Python code to dynamically add the parent + /// environment's `site-packages` directory to Python's import search paths in addition to + /// the ephemeral environment's `site-packages` directory. This works well at runtime, but + /// is too dynamic for static analysis tools like ty to understand. As such, we + /// additionally write the `sys.prefix` of the parent environment to the + /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it + /// easier for these tools to statically and reliably understand the relationship between + /// the two environments.
+ #[allow(clippy::result_large_err)] + pub(crate) fn set_parent_environment( + &self, + parent_environment_sys_prefix: &Path, + ) -> Result<(), ProjectError> { + self.0.set_pyvenv_cfg( + "extends-environment", + &parent_environment_sys_prefix.escape_for_python(), + )?; + Ok(()) + } +} + /// A [`PythonEnvironment`] stored in the cache. #[derive(Debug)] pub(crate) struct CachedEnvironment(PythonEnvironment); @@ -44,15 +107,13 @@ impl CachedEnvironment { printer: Printer, preview: PreviewMode, ) -> Result { - // Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the - // given interpreter is a virtual environment. - let base_interpreter = Self::base_interpreter(interpreter, cache)?; + let interpreter = Self::base_interpreter(interpreter, cache)?; // Resolve the requirements with the interpreter. let resolution = Resolution::from( resolve_environment( spec, - &base_interpreter, + &interpreter, build_constraints.clone(), &settings.resolver, network_settings, @@ -80,29 +141,20 @@ impl CachedEnvironment { // Use the canonicalized base interpreter path since that's the interpreter we performed the // resolution with and the interpreter the environment will be created with. // - // We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the - // virtual environment's path. Originally, we shared cached environments independent of the - // environment they'd be layered on top of. However, this causes collisions as the overlay - // `.pth` file can be overridden by another instance of uv. Including this element in the key - // avoids this problem at the cost of creating separate cached environments for identical - // `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so - // we can canonicalize it without invalidating the purpose of the element — it'd probably be - // safe to just use the absolute `sys.executable` as well. 
- // - // TODO(zanieb): Since we're not sharing these environmments across projects, we should move - // [`CachedEvnvironment::set_overlay`] etc. here since the values there should be constant - // now. + // We cache environments independent of the environment they'd be layered on top of. The + // assumption is such that the environment will _not_ be modified by the user or uv; + // otherwise, we risk cache poisoning. For example, if we were to write a `.pth` file to + // the cached environment, it would be shared across all projects that use the same + // interpreter and the same cached dependencies. // // TODO(zanieb): We should include the version of the base interpreter in the hash, so if // the interpreter at the canonicalized path changes versions we construct a new // environment. - let environment_hash = cache_digest(&( - &canonicalize_executable(base_interpreter.sys_executable())?, - &interpreter.sys_prefix().canonicalize()?, - )); + let interpreter_hash = + cache_digest(&canonicalize_executable(interpreter.sys_executable())?); // Search in the content-addressed cache. - let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash); + let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash); if cache.refresh().is_none() { if let Ok(root) = cache.resolve_link(cache_entry.path()) { @@ -116,7 +168,7 @@ impl CachedEnvironment { let temp_dir = cache.venv_dir()?; let venv = uv_virtualenv::create_venv( temp_dir.path(), - base_interpreter, + interpreter, uv_virtualenv::Prompt::None, false, false, @@ -150,76 +202,6 @@ impl CachedEnvironment { Ok(Self(PythonEnvironment::from_root(root, cache)?)) } - /// Set the ephemeral overlay for a Python environment. 
- #[allow(clippy::result_large_err)] - pub(crate) fn set_overlay(&self, contents: impl AsRef<[u8]>) -> Result<(), ProjectError> { - let site_packages = self - .0 - .site_packages() - .next() - .ok_or(ProjectError::NoSitePackages)?; - let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth"); - fs_err::write(overlay_path, contents)?; - Ok(()) - } - - /// Clear the ephemeral overlay for a Python environment, if it exists. - #[allow(clippy::result_large_err)] - pub(crate) fn clear_overlay(&self) -> Result<(), ProjectError> { - let site_packages = self - .0 - .site_packages() - .next() - .ok_or(ProjectError::NoSitePackages)?; - let overlay_path = site_packages.join("_uv_ephemeral_overlay.pth"); - match fs_err::remove_file(overlay_path) { - Ok(()) => (), - Err(err) if err.kind() == std::io::ErrorKind::NotFound => (), - Err(err) => return Err(ProjectError::OverlayRemoval(err)), - } - Ok(()) - } - - /// Enable system site packages for a Python environment. - #[allow(clippy::result_large_err)] - pub(crate) fn set_system_site_packages(&self) -> Result<(), ProjectError> { - self.0 - .set_pyvenv_cfg("include-system-site-packages", "true")?; - Ok(()) - } - - /// Disable system site packages for a Python environment. - #[allow(clippy::result_large_err)] - pub(crate) fn clear_system_site_packages(&self) -> Result<(), ProjectError> { - self.0 - .set_pyvenv_cfg("include-system-site-packages", "false")?; - Ok(()) - } - - /// Set the `extends-environment` key in the `pyvenv.cfg` file to the given path. - /// - /// Ephemeral environments created by `uv run --with` extend a parent (virtual or system) - /// environment by adding a `.pth` file to the ephemeral environment's `site-packages` - /// directory. The `pth` file contains Python code to dynamically add the parent - /// environment's `site-packages` directory to Python's import search paths in addition to - /// the ephemeral environment's `site-packages` directory. 
This works well at runtime, but - /// is too dynamic for static analysis tools like ty to understand. As such, we - /// additionally write the `sys.prefix` of the parent environment to the - /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it - /// easier for these tools to statically and reliably understand the relationship between - /// the two environments. - #[allow(clippy::result_large_err)] - pub(crate) fn set_parent_environment( - &self, - parent_environment_sys_prefix: &Path, - ) -> Result<(), ProjectError> { - self.0.set_pyvenv_cfg( - "extends-environment", - &parent_environment_sys_prefix.escape_for_python(), - )?; - Ok(()) - } - /// Return the [`Interpreter`] to use for the cached environment, based on a given /// [`Interpreter`]. /// diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 774009f63..eaccaefa6 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -200,9 +200,6 @@ pub(crate) enum ProjectError { #[error("Failed to parse PEP 723 script metadata")] Pep723ScriptTomlParse(#[source] toml::de::Error), - #[error("Failed to remove ephemeral overlay")] - OverlayRemoval(#[source] std::io::Error), - #[error("Failed to find `site-packages` directory for environment")] NoSitePackages, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 63850f563..16ebf88fb 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -45,7 +45,7 @@ use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, }; use crate::commands::pip::operations::Modifications; -use crate::commands::project::environment::CachedEnvironment; +use crate::commands::project::environment::{CachedEnvironment, EphemeralEnvironment}; use crate::commands::project::install_target::InstallTarget; use 
crate::commands::project::lock::LockMode; use crate::commands::project::lock_target::LockTarget; @@ -944,7 +944,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl // If necessary, create an environment for the ephemeral requirements or command. let base_site_packages = SitePackages::from_interpreter(&base_interpreter)?; - let ephemeral_env = match spec { + let requirements_env = match spec { None => None, Some(spec) if can_skip_ephemeral(&spec, &base_interpreter, &base_site_packages, &settings) => @@ -952,7 +952,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl None } Some(spec) => { - debug!("Syncing ephemeral requirements"); + debug!("Syncing `--with` requirements to cached environment"); // Read the build constraints from the lock file. let build_constraints = base_lock @@ -1013,54 +1013,92 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl Err(err) => return Err(err.into()), }; - Some(environment) + Some(PythonEnvironment::from(environment)) } }; - // If we're running in an ephemeral environment, add a path file to enable loading of - // the base environment's site packages. Setting `PYTHONPATH` is insufficient, as it doesn't - // resolve `.pth` files in the base environment. + // If we're layering requirements atop the project environment, run the command in an ephemeral, + // isolated environment. Otherwise, modifications to the "active virtual environment" would + // poison the cache. + let ephemeral_dir = requirements_env + .as_ref() + .map(|_| cache.venv_dir()) + .transpose()?; + + let ephemeral_env = ephemeral_dir + .as_ref() + .map(|dir| { + debug!( + "Creating ephemeral environment at: `{}`", + dir.path().simplified_display() + ); + + uv_virtualenv::create_venv( + dir.path(), + base_interpreter.clone(), + uv_virtualenv::Prompt::None, + false, + false, + false, + false, + false, + preview, + ) + }) + .transpose()? 
+ .map(EphemeralEnvironment::from); + + // If we're running in an ephemeral environment, add a path file to enable loading from the + // `--with` requirements environment and the project environment site packages. // - // `sitecustomize.py` would be an alternative, but it can be shadowed by an existing such - // module in the python installation. + // Setting `PYTHONPATH` is insufficient, as it doesn't resolve `.pth` files in the base + // environment. Adding `sitecustomize.py` would be an alternative, but it can be shadowed by an + // existing such module in the python installation. if let Some(ephemeral_env) = ephemeral_env.as_ref() { - let site_packages = base_interpreter - .site_packages() - .next() - .ok_or_else(|| ProjectError::NoSitePackages)?; - ephemeral_env.set_overlay(format!( - "import site; site.addsitedir(\"{}\")", - site_packages.escape_for_python() - ))?; + if let Some(requirements_env) = requirements_env.as_ref() { + let requirements_site_packages = + requirements_env.site_packages().next().ok_or_else(|| { + anyhow!("Requirements environment has no site packages directory") + })?; + let base_site_packages = base_interpreter + .site_packages() + .next() + .ok_or_else(|| anyhow!("Base environment has no site packages directory"))?; - // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg` - // file. This helps out static-analysis tools such as ty (see docs on - // `CachedEnvironment::set_parent_environment`). - // - // Note that we do this even if the parent environment is not a virtual environment. - // For ephemeral environments created by `uv run --with`, the parent environment's - // `site-packages` directory is added to `sys.path` even if the parent environment is not - // a virtual environment and even if `--system-site-packages` was not explicitly selected. 
- ephemeral_env.set_parent_environment(base_interpreter.sys_prefix())?; + ephemeral_env.set_overlay(format!( + "import site; site.addsitedir(\"{}\"); site.addsitedir(\"{}\");", + base_site_packages.escape_for_python(), + requirements_site_packages.escape_for_python(), + ))?; - // If `--system-site-packages` is enabled, add the system site packages to the ephemeral - // environment. - if base_interpreter.is_virtualenv() - && PyVenvConfiguration::parse(base_interpreter.sys_prefix().join("pyvenv.cfg")) - .is_ok_and(|cfg| cfg.include_system_site_packages()) - { - ephemeral_env.set_system_site_packages()?; - } else { - ephemeral_env.clear_system_site_packages()?; + // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg` + // file. This helps out static-analysis tools such as ty (see docs on + // `CachedEnvironment::set_parent_environment`). + // + // Note that we do this even if the parent environment is not a virtual environment. + // For ephemeral environments created by `uv run --with`, the parent environment's + // `site-packages` directory is added to `sys.path` even if the parent environment is not + // a virtual environment and even if `--system-site-packages` was not explicitly selected. + ephemeral_env.set_parent_environment(base_interpreter.sys_prefix())?; + + // If `--system-site-packages` is enabled, add the system site packages to the ephemeral + // environment. + if base_interpreter.is_virtualenv() + && PyVenvConfiguration::parse(base_interpreter.sys_prefix().join("pyvenv.cfg")) + .is_ok_and(|cfg| cfg.include_system_site_packages()) + { + ephemeral_env.set_system_site_packages()?; + } } } - // Cast from `CachedEnvironment` to `PythonEnvironment`. + // Cast to `PythonEnvironment`. let ephemeral_env = ephemeral_env.map(PythonEnvironment::from); // Determine the Python interpreter to use for the command, if necessary. 
let interpreter = ephemeral_env .as_ref() + .or(requirements_env.as_ref()) .map_or_else(|| &base_interpreter, |env| env.interpreter()); // Check if any run command is given. @@ -1143,6 +1181,12 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl .as_ref() .map(PythonEnvironment::scripts) .into_iter() + .chain( + requirements_env + .as_ref() + .map(PythonEnvironment::scripts) + .into_iter(), + ) .chain(std::iter::once(base_interpreter.scripts())) .chain( // On Windows, non-virtual Python distributions put `python.exe` in the top-level diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index f6b79774c..a1faa1153 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -1081,9 +1081,5 @@ async fn get_or_create_environment( }, }; - // Clear any existing overlay. - environment.clear_overlay()?; - environment.clear_system_site_packages()?; - Ok((from, environment.into())) } diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 477b4b039..d7ac9b47a 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -15806,7 +15806,7 @@ fn lock_explicit_default_index() -> Result<()> { DEBUG Adding root workspace member: `[TEMP_DIR]/` DEBUG No Python version file found in workspace: [TEMP_DIR]/ DEBUG Using Python request `>=3.12` from `requires-python` metadata - DEBUG Checking for Python environment at `.venv` + DEBUG Checking for Python environment at: `.venv` DEBUG The project environment's Python version satisfies the request: `Python >=3.12` DEBUG Using request timeout of [TIME] DEBUG Found static `pyproject.toml` for: project @ file://[TEMP_DIR]/ diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index 93420cca0..6a1eb6093 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -1302,7 +1302,6 @@ fn run_with_pyvenv_cfg_file() -> Result<()> { uv = [UV_VERSION] version_info = 3.12.[X] 
include-system-site-packages = false - relocatable = true extends-environment = [PARENT_VENV] @@ -4778,7 +4777,6 @@ fn run_groups_include_requires_python() -> Result<()> { baz = ["iniconfig"] dev = ["sniffio", {include-group = "foo"}, {include-group = "baz"}] - [tool.uv.dependency-groups] foo = {requires-python="<3.13"} bar = {requires-python=">=3.13"} @@ -4923,8 +4921,8 @@ fn run_repeated() -> Result<()> { Resolved 1 package in [TIME] "###); - // Re-running as a tool does require reinstalling `typing-extensions`, since the base venv is - // different. + // Re-running as a tool doesn't require reinstalling `typing-extensions`, since the environment + // is cached. uv_snapshot!( context.filters(), context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#" @@ -4934,8 +4932,6 @@ fn run_repeated() -> Result<()> { ----- stderr ----- Resolved 1 package in [TIME] - Installed 1 package in [TIME] - + typing-extensions==4.10.0 Traceback (most recent call last): File "", line 1, in import typing_extensions; import iniconfig @@ -4982,8 +4978,7 @@ fn run_without_overlay() -> Result<()> { + typing-extensions==4.10.0 "###); - // Import `iniconfig` in the context of a `tool run` command, which should fail. Note that - // typing-extensions gets installed again, because the venv is not shared. + // Import `iniconfig` in the context of a `tool run` command, which should fail. 
uv_snapshot!( context.filters(), context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#" @@ -4993,8 +4988,6 @@ fn run_without_overlay() -> Result<()> { ----- stderr ----- Resolved 1 package in [TIME] - Installed 1 package in [TIME] - + typing-extensions==4.10.0 Traceback (most recent call last): File "", line 1, in import typing_extensions; import iniconfig From 2850dc05992c47a10a1f968d65c59c1e21cf1df2 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 14 Jul 2025 13:47:52 -0400 Subject: [PATCH 067/130] make `--check` outdated a non-error status 1 (#14167) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In the case of `uv sync` all we really need to do is handle the `OutdatedEnvironment` error (precisely the error we yield only on dry-runs when everything Works but we determine things are outdated) in `OperationDiagnostic::report` (the post-processor on all `operations::install` calls) because any diagnostic handled by that gets downgraded to from status 2 to status 1 (although I don't know if that's really intentional or a random other bug in our status handling... but I figured it's best to highlight that other potential status code incongruence than not rely on it 😄). 
Fixes #12603 --------- Co-authored-by: John Mumm --- crates/uv/src/commands/diagnostics.rs | 4 + crates/uv/src/commands/project/lock.rs | 11 +- crates/uv/src/commands/project/mod.rs | 2 +- crates/uv/src/commands/project/sync.rs | 37 +++- crates/uv/tests/it/lock.rs | 233 +++++++++++++++++-------- crates/uv/tests/it/sync.rs | 28 +-- 6 files changed, 217 insertions(+), 98 deletions(-) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index 7a9fcbd35..02412d683 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -127,6 +127,10 @@ impl OperationDiagnostic { native_tls_hint(err); None } + pip::operations::Error::OutdatedEnvironment => { + anstream::eprint!("{}", err); + None + } err => Some(err), } } diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index f79557d9e..833e59a13 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -234,6 +234,10 @@ pub(crate) async fn lock( Ok(ExitStatus::Success) } + Err(err @ ProjectError::LockMismatch(..)) => { + writeln!(printer.stderr(), "{}", err.to_string().bold())?; + Ok(ExitStatus::Failure) + } Err(ProjectError::Operation(err)) => { diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls) .report(err) @@ -346,8 +350,11 @@ impl<'env> LockOperation<'env> { .await?; // If the lockfile changed, return an error. 
- if matches!(result, LockResult::Changed(_, _)) { - return Err(ProjectError::LockMismatch(Box::new(result.into_lock()))); + if let LockResult::Changed(prev, cur) = result { + return Err(ProjectError::LockMismatch( + prev.map(Box::new), + Box::new(cur), + )); } Ok(result) diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index eaccaefa6..fde2b638c 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -75,7 +75,7 @@ pub(crate) enum ProjectError { #[error( "The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`." )] - LockMismatch(Box), + LockMismatch(Option>, Box), #[error( "Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`." diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 5843df6be..40aa1b352 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -330,10 +330,19 @@ pub(crate) async fn sync( .report(err) .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())); } - Err(ProjectError::LockMismatch(lock)) if dry_run.enabled() => { - // The lockfile is mismatched, but we're in dry-run mode. We should proceed with the - // sync operation, but exit with a non-zero status. - Outcome::LockMismatch(lock) + Err(ProjectError::LockMismatch(prev, cur)) => { + if dry_run.enabled() { + // The lockfile is mismatched, but we're in dry-run mode. We should proceed with the + // sync operation, but exit with a non-zero status. + Outcome::LockMismatch(prev, cur) + } else { + writeln!( + printer.stderr(), + "{}", + ProjectError::LockMismatch(prev, cur).to_string().bold() + )?; + return Ok(ExitStatus::Failure); + } } Err(err) => return Err(err.into()), }; @@ -398,7 +407,14 @@ pub(crate) async fn sync( match outcome { Outcome::Success(..) 
=> Ok(ExitStatus::Success), - Outcome::LockMismatch(lock) => Err(ProjectError::LockMismatch(lock).into()), + Outcome::LockMismatch(prev, cur) => { + writeln!( + printer.stderr(), + "{}", + ProjectError::LockMismatch(prev, cur).to_string().bold() + )?; + Ok(ExitStatus::Failure) + } } } @@ -409,15 +425,18 @@ enum Outcome { /// The `lock` operation was successful. Success(LockResult), /// The `lock` operation successfully resolved, but failed due to a mismatch (e.g., with `--locked`). - LockMismatch(Box), + LockMismatch(Option>, Box), } impl Outcome { /// Return the [`Lock`] associated with this outcome. fn lock(&self) -> &Lock { match self { - Self::Success(lock) => lock.lock(), - Self::LockMismatch(lock) => lock, + Self::Success(lock) => match lock { + LockResult::Changed(_, lock) => lock, + LockResult::Unchanged(lock) => lock, + }, + Self::LockMismatch(_prev, cur) => cur, } } } @@ -1179,7 +1198,7 @@ impl From<(&LockTarget<'_>, &LockMode<'_>, &Outcome)> for LockReport { } } // TODO(zanieb): We don't have a way to report the outcome of the lock yet - Outcome::LockMismatch(_) => LockAction::Check, + Outcome::LockMismatch(..) => LockAction::Check, }, dry_run: matches!(mode, LockMode::DryRun(_)), } diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index d7ac9b47a..faf37a83a 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -6660,15 +6660,15 @@ fn lock_invalid_hash() -> Result<()> { "#)?; // Re-run with `--locked`. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. 
+ "); // Install from the lockfile. uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" @@ -11743,6 +11743,95 @@ fn unconditional_overlapping_marker_disjoint_version_constraints() -> Result<()> Ok(()) } +/// Checks the output of `uv lock --check` when there isn't a lock +#[test] +fn check_no_lock() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "myproject" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = ["sortedcollections"] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock() + .arg("--check"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`. + "); + Ok(()) +} + +/// Checks the output of `uv lock --check` when the lock is outdated +#[test] +fn check_outdated_lock() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "myproject" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = ["sortedcollections"] + "#, + )?; + + // Generate the lock + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "); + + // Check the --check returns fine + uv_snapshot!(context.filters(), context.lock() + .arg("--check"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "); + + // Edit dependencies so the lock is invalid + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = ["iniconfig"] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock() + .arg("--check"), @r" + success: false + exit_code: 
1 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); + Ok(()) +} + /// This checks that markers that normalize to 'false', which are serialized /// to the lockfile as `python_full_version < '0'`, get read back as false. /// Otherwise `uv lock --check` will always fail. @@ -12094,15 +12183,15 @@ fn lock_remove_member() -> Result<()> { )?; // Re-run with `--locked`. This should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run without `--locked`. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -12239,15 +12328,15 @@ fn lock_add_member() -> Result<()> { )?; // Re-run with `--locked`. This should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 5 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run with `--offline`. This should also fail, during the resolve phase. 
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###" @@ -12476,15 +12565,15 @@ fn lock_redundant_add_member() -> Result<()> { // Re-run with `--locked`. This will fail, though in theory it could succeed, since the current // _resolution_ satisfies the requirements, even if the inputs are not identical - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run without `--locked`. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -12674,15 +12763,15 @@ fn lock_new_constraints() -> Result<()> { )?; // Re-run with `--locked`. This should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run without `--locked`. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -12883,16 +12972,16 @@ fn lock_remove_member_non_project() -> Result<()> { )?; // Re-run with `--locked`. This should fail. 
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`. Resolved in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run without `--locked`. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -13015,15 +13104,15 @@ fn lock_rename_project() -> Result<()> { )?; // Re-run with `--locked`. This should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-run without `--locked`. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -14015,15 +14104,15 @@ fn lock_constrained_environment() -> Result<()> { )?; // Re-run with `--locked`. This should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 8 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. 
To update the lockfile, run `uv lock`. + "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -15278,15 +15367,15 @@ fn lock_add_empty_dependency_group() -> Result<()> { )?; // Re-run with `--locked`; this should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-lock the project. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -15360,15 +15449,15 @@ fn lock_add_empty_dependency_group() -> Result<()> { )?; // Re-run with `--locked`; this should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); // Re-lock the project. uv_snapshot!(context.filters(), context.lock(), @r###" @@ -23253,15 +23342,15 @@ fn lock_dynamic_to_static() -> Result<()> { )?; // Rerunning with `--locked` should fail, since the project is no longer dynamic. 
- uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -23384,15 +23473,15 @@ fn lock_static_to_dynamic() -> Result<()> { .write_str("__version__ = '0.1.0'")?; // Rerunning with `--locked` should fail, since the project is no longer static. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -23486,15 +23575,15 @@ fn lock_bump_static_version() -> Result<()> { )?; // Rerunning with `--locked` should fail. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. 
+ "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -25302,15 +25391,15 @@ fn lock_script() -> Result<()> { })?; // Re-run with `--locked`. - uv_snapshot!(context.filters(), context.lock().arg("--script").arg("script.py").arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--script").arg("script.py").arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); Ok(()) } @@ -27631,15 +27720,15 @@ fn lock_empty_extra() -> Result<()> { )?; // Re-run with `--locked`. We expect this to fail, since we've added an extra. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 3 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -27667,15 +27756,15 @@ fn lock_empty_extra() -> Result<()> { )?; // Re-run with `--locked`. We expect this to fail, since we've added an extra. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 3 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. 
To update the lockfile, run `uv lock`. - "###); + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); uv_snapshot!(context.filters(), context.lock(), @r###" success: true @@ -28341,12 +28430,12 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()> // Re-run with `--locked`. uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); Ok(()) @@ -28432,12 +28521,12 @@ fn lock_trailing_slash_index_url_in_lockfile_not_pyproject() -> Result<()> { // Run `uv lock --locked`. uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); Ok(()) @@ -28523,12 +28612,12 @@ fn lock_trailing_slash_index_url_in_pyproject_and_not_lockfile() -> Result<()> { // Run `uv lock --locked`. uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. 
"); Ok(()) @@ -28714,12 +28803,12 @@ fn lock_trailing_slash_find_links() -> Result<()> { // Re-run with `--locked` uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); uv_snapshot!(context.filters(), context.lock(), @r" diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 0165cc7f6..c225225b8 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -88,12 +88,12 @@ fn locked() -> Result<()> { // Running with `--locked` should error. uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); let updated = context.read("uv.lock"); @@ -424,12 +424,12 @@ fn sync_json() -> Result<()> { .arg("--locked") .arg("--output-format").arg("json"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); Ok(()) @@ -894,7 +894,7 @@ fn check() -> Result<()> { // Running `uv sync --check` should fail. 
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- @@ -904,7 +904,7 @@ fn check() -> Result<()> { Would download 1 package Would install 1 package + iniconfig==2.0.0 - error: The environment is outdated; run `uv sync` to update the environment + The environment is outdated; run `uv sync` to update the environment "); // Sync the environment. @@ -8626,7 +8626,7 @@ fn sync_dry_run_and_locked() -> Result<()> { // Running with `--locked` and `--dry-run` should error. uv_snapshot!(context.filters(), context.sync().arg("--locked").arg("--dry-run"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- @@ -8635,7 +8635,7 @@ fn sync_dry_run_and_locked() -> Result<()> { Would download 1 package Would install 1 package + iniconfig==2.0.0 - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); let updated = context.read("uv.lock"); @@ -8962,13 +8962,13 @@ fn sync_locked_script() -> Result<()> { // Re-run with `--locked`. uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Using script environment at: [CACHE_DIR]/environments-v2/script-[HASH] Resolved 4 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" @@ -9064,14 +9064,14 @@ fn sync_locked_script() -> Result<()> { // Re-run with `--locked`. 
uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py").arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Updating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'` Resolved 6 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); uv_snapshot!(&filters, context.sync().arg("--script").arg("script.py"), @r" @@ -9944,12 +9944,12 @@ fn sync_build_constraints() -> Result<()> { // This should fail, given that the build constraints have changed. uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); // Changing the build constraints should lead to a re-resolve. 
From 95c0b71f7709e3097b78ef39d9d13be26b4c7f2c Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Wed, 16 Jul 2025 09:24:05 -0400 Subject: [PATCH 068/130] Remove `uv version` fallback (#14161) Fixes #14157 --------- Co-authored-by: John Mumm --- crates/uv/src/commands/project/version.rs | 18 +--------- crates/uv/src/lib.rs | 10 ------ crates/uv/tests/it/version.rs | 43 ++++++++--------------- 3 files changed, 16 insertions(+), 55 deletions(-) diff --git a/crates/uv/src/commands/project/version.rs b/crates/uv/src/commands/project/version.rs index ed1e9e246..efba226b9 100644 --- a/crates/uv/src/commands/project/version.rs +++ b/crates/uv/src/commands/project/version.rs @@ -19,7 +19,6 @@ use uv_pep440::{BumpCommand, PrereleaseKind, Version}; use uv_pep508::PackageName; use uv_python::{PythonDownloads, PythonPreference, PythonRequest}; use uv_settings::PythonInstallMirrors; -use uv_warnings::warn_user; use uv_workspace::pyproject_mut::Error; use uv_workspace::{ DiscoveryOptions, WorkspaceCache, @@ -58,7 +57,6 @@ pub(crate) async fn project_version( mut bump: Vec, short: bool, output_format: VersionFormat, - strict: bool, project_dir: &Path, package: Option, dry_run: bool, @@ -80,21 +78,7 @@ pub(crate) async fn project_version( preview: PreviewMode, ) -> Result { // Read the metadata - let project = match find_target(project_dir, package.as_ref()).await { - Ok(target) => target, - Err(err) => { - // If strict, hard bail on failing to find the pyproject.toml - if strict { - return Err(err)?; - } - // Otherwise, warn and provide fallback to the old `uv version` from before 0.7.0 - warn_user!( - "Failed to read project metadata ({err}). Running `{}` for compatibility. 
This fallback will be removed in the future; pass `--preview` to force an error.", - "uv self version".green() - ); - return self_version(short, output_format, printer); - } - }; + let project = find_target(project_dir, package.as_ref()).await?; let pyproject_path = project.root().join("pyproject.toml"); let Some(name) = project.project_name().cloned() else { diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 995738638..433f5afd3 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1062,7 +1062,6 @@ async fn run(mut cli: Cli) -> Result { } Commands::Project(project) => { Box::pin(run_project( - cli.top_level.global_args.project.is_some(), project, &project_dir, run_command, @@ -1663,7 +1662,6 @@ async fn run(mut cli: Cli) -> Result { /// Run a [`ProjectCommand`]. async fn run_project( - project_was_explicit: bool, project_command: Box, project_dir: &Path, command: Option, @@ -2055,19 +2053,11 @@ async fn run_project( .combine(Refresh::from(args.settings.resolver.upgrade.clone())), ); - // If they specified any of these flags, they probably don't mean `uv self version` - let strict = project_was_explicit - || globals.preview.is_enabled() - || args.dry_run - || !args.bump.is_empty() - || args.value.is_some() - || args.package.is_some(); Box::pin(commands::project_version( args.value, args.bump, args.short, args.output_format, - strict, project_dir, args.package, args.dry_run, diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs index 3c5e28e0f..78dd64252 100644 --- a/crates/uv/tests/it/version.rs +++ b/crates/uv/tests/it/version.rs @@ -1437,8 +1437,8 @@ fn version_set_dynamic() -> Result<()> { Ok(()) } -// Should fallback to `uv --version` if this pyproject.toml isn't usable for whatever reason -// (In this case, because tool.uv.managed = false) +/// Previously would fallback to `uv --version` if this pyproject.toml isn't usable for whatever reason +/// (In this case, because tool.uv.managed = false) #[test] fn 
version_get_fallback_unmanaged() -> Result<()> { let context = TestContext::new("3.12"); @@ -1456,13 +1456,12 @@ fn version_get_fallback_unmanaged() -> Result<()> { )?; uv_snapshot!(context.filters(), context.version(), @r" - success: true - exit_code: 0 + success: false + exit_code: 2 ----- stdout ----- - uv [VERSION] ([COMMIT] DATE) ----- stderr ----- - warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error. + error: The project is marked as unmanaged: `[TEMP_DIR]/` "); let pyproject = fs_err::read_to_string(&pyproject_toml)?; @@ -1507,13 +1506,12 @@ fn version_get_fallback_unmanaged_short() -> Result<()> { .collect::>(); uv_snapshot!(filters, context.version() .arg("--short"), @r" - success: true - exit_code: 0 + success: false + exit_code: 2 ----- stdout ----- - [VERSION] ([COMMIT] DATE) ----- stderr ----- - warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error. 
+ error: The project is marked as unmanaged: `[TEMP_DIR]/` "); let pyproject = fs_err::read_to_string(&pyproject_toml)?; @@ -1587,25 +1585,14 @@ fn version_get_fallback_unmanaged_json() -> Result<()> { .collect::>(); if git_version_info_expected() { uv_snapshot!(filters, context.version() - .arg("--output-format").arg("json"), @r#" - success: true - exit_code: 0 - ----- stdout ----- - { - "package_name": "uv", - "version": "[VERSION]", - "commit_info": { - "short_commit_hash": "[LONGHASH]", - "commit_hash": "[LONGHASH]", - "commit_date": "[DATE]", - "last_tag": "[TAG]", - "commits_since_last_tag": [COUNT] - } - } + .arg("--output-format").arg("json"), @r" + success: false + exit_code: 2 + ----- stdout ----- - ----- stderr ----- - warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error. - "#); + ----- stderr ----- + error: The project is marked as unmanaged: `[TEMP_DIR]/` + "); } else { uv_snapshot!(filters, context.version() .arg("--output-format").arg("json"), @r#" From 3c9aea87b41f01b26775facd1e3d34828ef53b9f Mon Sep 17 00:00:00 2001 From: konsti Date: Wed, 16 Jul 2025 19:07:08 +0100 Subject: [PATCH 069/130] `uv init`: Make `uv_build` the default build backend (from `hatchling`) (#14661) Closes https://github.com/astral-sh/uv/issues/14298 Switch the default build backend for `uv init` from `hatchling` to `uv_build`. This change affects the following two commands: * `uv init --lib` * `uv init [--app] --package` It does not affect `uv init` or `uv init --app` without `--package`. `uv init --build-backend <...>` also works as before. 
**Before** ``` $ uv init --lib project $ cat project/pyproject.toml [project] name = "project" version = "0.1.0" description = "Add your description here" readme = "README.md" authors = [ { name = "konstin", email = "konstin@mailbox.org" } ] requires-python = ">=3.13.2" dependencies = [] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" ``` **After** ``` $ uv init --lib project $ cat project/pyproject.toml [project] name = "project" version = "0.1.0" description = "Add your description here" readme = "README.md" authors = [ { name = "konstin", email = "konstin@mailbox.org" } ] requires-python = ">=3.13.2" dependencies = [] [build-system] requires = ["uv_build>=0.7.20,<0.8"] build-backend = "uv_build" ``` I cleaned up some tests for consistency in the second commit. --- crates/uv-build-backend/src/metadata.rs | 14 +- crates/uv/src/commands/project/init.rs | 21 +-- crates/uv/tests/it/build.rs | 17 +- crates/uv/tests/it/build_backend.rs | 20 +- crates/uv/tests/it/common/mod.rs | 8 + crates/uv/tests/it/init.rs | 235 +++++++----------------- docs/concepts/build-backend.md | 7 +- 7 files changed, 99 insertions(+), 223 deletions(-) diff --git a/crates/uv-build-backend/src/metadata.rs b/crates/uv-build-backend/src/metadata.rs index 00a207c7a..296c76a2b 100644 --- a/crates/uv-build-backend/src/metadata.rs +++ b/crates/uv-build-backend/src/metadata.rs @@ -171,7 +171,7 @@ impl PyProjectToml { /// /// ```toml /// [build-system] - /// requires = ["uv_build>=0.4.15,<5"] + /// requires = ["uv_build>=0.4.15,<0.5"] /// build-backend = "uv_build" /// ``` pub fn check_build_system(&self, uv_version: &str) -> Vec { @@ -826,7 +826,7 @@ mod tests { {payload} [build-system] - requires = ["uv_build>=0.4.15,<5"] + requires = ["uv_build>=0.4.15,<0.5"] build-backend = "uv_build" "# } @@ -909,7 +909,7 @@ mod tests { foo-bar = "foo:bar" [build-system] - requires = ["uv_build>=0.4.15,<5"] + requires = ["uv_build>=0.4.15,<0.5"] build-backend = "uv_build" "# }; @@ -1036,7 
+1036,7 @@ mod tests { foo-bar = "foo:bar" [build-system] - requires = ["uv_build>=0.4.15,<5"] + requires = ["uv_build>=0.4.15,<0.5"] build-backend = "uv_build" "# }; @@ -1104,7 +1104,7 @@ mod tests { let contents = extend_project(""); let pyproject_toml = PyProjectToml::parse(&contents).unwrap(); assert_snapshot!( - pyproject_toml.check_build_system("1.0.0+test").join("\n"), + pyproject_toml.check_build_system("0.4.15+test").join("\n"), @"" ); } @@ -1135,7 +1135,7 @@ mod tests { version = "0.1.0" [build-system] - requires = ["uv_build>=0.4.15,<5", "wheel"] + requires = ["uv_build>=0.4.15,<0.5", "wheel"] build-backend = "uv_build" "#}; let pyproject_toml = PyProjectToml::parse(contents).unwrap(); @@ -1171,7 +1171,7 @@ mod tests { version = "0.1.0" [build-system] - requires = ["uv_build>=0.4.15,<5"] + requires = ["uv_build>=0.4.15,<0.5"] build-backend = "setuptools" "#}; let pyproject_toml = PyProjectToml::parse(contents).unwrap(); diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index 9ff321a72..4fd79b1c2 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -63,9 +63,6 @@ pub(crate) async fn init( printer: Printer, preview: PreviewMode, ) -> Result { - if build_backend == Some(ProjectBuildBackend::Uv) && preview.is_disabled() { - warn_user_once!("The uv build backend is experimental and may change without warning"); - } match init_kind { InitKind::Script => { let Some(path) = explicit_path.as_deref() else { @@ -596,7 +593,6 @@ async fn init_project( author_from, no_readme, package, - preview, )?; if let Some(workspace) = workspace { @@ -724,7 +720,6 @@ impl InitProjectKind { author_from: Option, no_readme: bool, package: bool, - preview: PreviewMode, ) -> Result<()> { match self { InitProjectKind::Application => InitProjectKind::init_application( @@ -739,7 +734,6 @@ impl InitProjectKind { author_from, no_readme, package, - preview, ), InitProjectKind::Library => 
InitProjectKind::init_library( name, @@ -753,7 +747,6 @@ impl InitProjectKind { author_from, no_readme, package, - preview, ), } } @@ -772,7 +765,6 @@ impl InitProjectKind { author_from: Option, no_readme: bool, package: bool, - preview: PreviewMode, ) -> Result<()> { fs_err::create_dir_all(path)?; @@ -805,11 +797,7 @@ impl InitProjectKind { } // Add a build system - let build_backend = match build_backend { - Some(build_backend) => build_backend, - None if preview.is_enabled() => ProjectBuildBackend::Uv, - None => ProjectBuildBackend::Hatch, - }; + let build_backend = build_backend.unwrap_or(ProjectBuildBackend::Uv); pyproject.push('\n'); pyproject.push_str(&pyproject_build_system(name, build_backend)); pyproject_build_backend_prerequisites(name, path, build_backend)?; @@ -859,7 +847,6 @@ impl InitProjectKind { author_from: Option, no_readme: bool, package: bool, - preview: PreviewMode, ) -> Result<()> { if !package { return Err(anyhow!("Library projects must be packaged")); @@ -880,11 +867,7 @@ impl InitProjectKind { ); // Always include a build system if the project is packaged. 
- let build_backend = match build_backend { - Some(build_backend) => build_backend, - None if preview.is_enabled() => ProjectBuildBackend::Uv, - None => ProjectBuildBackend::Hatch, - }; + let build_backend = build_backend.unwrap_or(ProjectBuildBackend::Uv); pyproject.push('\n'); pyproject.push_str(&pyproject_build_system(name, build_backend)); pyproject_build_backend_prerequisites(name, path, build_backend)?; diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs index 3d08a90d4..656c68d3f 100644 --- a/crates/uv/tests/it/build.rs +++ b/crates/uv/tests/it/build.rs @@ -1439,7 +1439,6 @@ fn build_fast_path() -> Result<()> { let built_by_uv = current_dir()?.join("../../scripts/packages/built-by-uv"); uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output1")), @r###" @@ -1465,7 +1464,6 @@ fn build_fast_path() -> Result<()> { .assert(predicate::path::is_file()); uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output2")) @@ -1485,7 +1483,6 @@ fn build_fast_path() -> Result<()> { .assert(predicate::path::is_file()); uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output3")) @@ -1505,7 +1502,6 @@ fn build_fast_path() -> Result<()> { .assert(predicate::path::is_file()); uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output4")) @@ -1545,7 +1541,6 @@ fn build_list_files() -> Result<()> { // By default, we build the wheel from the source dist, which we need to do even for the list // task. 
uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output1")) @@ -1601,7 +1596,6 @@ fn build_list_files() -> Result<()> { .assert(predicate::path::missing()); uv_snapshot!(context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output2")) @@ -1670,7 +1664,6 @@ fn build_list_files_errors() -> Result<()> { // In CI, we run with link mode settings. filters.push(("--link-mode ", "")); uv_snapshot!(filters, context.build() - .arg("--preview") .arg(&built_by_uv) .arg("--out-dir") .arg(context.temp_dir.join("output1")) @@ -1694,7 +1687,6 @@ fn build_list_files_errors() -> Result<()> { // Windows normalization filters.push(("/crates/uv/../../", "/")); uv_snapshot!(filters, context.build() - .arg("--preview") .arg(&anyio_local) .arg("--out-dir") .arg(context.temp_dir.join("output2")) @@ -1987,12 +1979,7 @@ fn force_pep517() -> Result<()> { // We need to use a real `uv_build` package. let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z"); - context - .init() - .arg("--build-backend") - .arg("uv") - .assert() - .success(); + context.init().assert().success(); let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str(indoc! {r#" @@ -2026,7 +2013,7 @@ fn force_pep517() -> Result<()> { ----- stderr ----- Building source distribution... - Error: Missing module directory for `does_not_exist` in `src`. 
Found: `temp` + Error: Missing source directory at: `src` × Failed to build `[TEMP_DIR]/` ├─▶ The build backend returned an error ╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1) diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs index b3bd337ae..ae3a7a740 100644 --- a/crates/uv/tests/it/build_backend.rs +++ b/crates/uv/tests/it/build_backend.rs @@ -222,8 +222,7 @@ fn preserve_executable_bit() -> Result<()> { let project_dir = context.temp_dir.path().join("preserve_executable_bit"); context .init() - .arg("--build-backend") - .arg("uv") + .arg("--lib") .arg(&project_dir) .assert() .success(); @@ -296,7 +295,7 @@ fn rename_module() -> Result<()> { module-name = "bar" [build-system] - requires = ["uv_build>=0.5,<0.8"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "#})?; @@ -377,7 +376,7 @@ fn rename_module_editable_build() -> Result<()> { module-name = "bar" [build-system] - requires = ["uv_build>=0.5,<0.8"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "#})?; @@ -436,7 +435,7 @@ fn build_module_name_normalization() -> Result<()> { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5,<0.8"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" [tool.uv.build-backend] @@ -548,7 +547,7 @@ fn build_sdist_with_long_path() -> Result<()> { version = "1.0.0" [build-system] - requires = ["uv_build>=0.7,<0.8"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "#})?; context @@ -591,7 +590,7 @@ fn sdist_error_without_module() -> Result<()> { version = "1.0.0" [build-system] - requires = ["uv_build>=0.7,<0.8"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "#})?; @@ -661,7 +660,7 @@ fn complex_namespace_packages() -> Result<()> { module-name = "{project_name_dist_info}.{part_name}" [build-system] - requires = ["uv_build>=0.5.15,<10000"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "# }; @@ -770,8 +769,7 @@ fn 
symlinked_file() -> Result<()> { let project = context.temp_dir.child("project"); context .init() - .arg("--build-backend") - .arg("uv") + .arg("--lib") .arg(project.path()) .assert() .success(); @@ -783,7 +781,7 @@ fn symlinked_file() -> Result<()> { license-files = ["LICENSE"] [build-system] - requires = ["uv_build>=0.5.15,<10000"] + requires = ["uv_build>=0.7,<10000"] build-backend = "uv_build" "# })?; diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index d4a73f953..bc6e65f4e 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -664,6 +664,14 @@ impl TestContext { )); // For wiremock tests filters.push((r"127\.0\.0\.1:\d*".to_string(), "[LOCALHOST]".to_string())); + // Avoid breaking the tests when bumping the uv version + filters.push(( + format!( + r#"requires = \["uv_build>={},<[0-9.]+"\]"#, + uv_version::version() + ), + r#"requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]"#.to_string(), + )); Self { root: ChildPath::new(root.path()), diff --git a/crates/uv/tests/it/init.rs b/crates/uv/tests/it/init.rs index c5993d670..3f374eada 100644 --- a/crates/uv/tests/it/init.rs +++ b/crates/uv/tests/it/init.rs @@ -314,7 +314,7 @@ fn init_application_package() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -327,9 +327,9 @@ fn init_application_package() -> Result<()> { foo = "foo:main" [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -390,7 +390,7 @@ fn init_library() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -400,9 +400,9 @@ fn init_library() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = 
"hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -446,91 +446,6 @@ fn init_library() -> Result<()> { Ok(()) } -/// Test the uv build backend with using `uv init --lib --preview`. To be merged with the regular -/// init lib test once the uv build backend becomes the stable default. -#[test] -fn init_library_preview() -> Result<()> { - let context = TestContext::new("3.12"); - - let child = context.temp_dir.child("foo"); - child.create_dir_all()?; - - let pyproject_toml = child.join("pyproject.toml"); - let init_py = child.join("src").join("foo").join("__init__.py"); - let py_typed = child.join("src").join("foo").join("py.typed"); - - uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--lib").arg("--preview"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Initialized project `foo` - "###); - - let pyproject = fs_err::read_to_string(&pyproject_toml)?; - let mut filters = context.filters(); - filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#)); - insta::with_settings!({ - filters => filters, - }, { - assert_snapshot!( - pyproject, @r#" - [project] - name = "foo" - version = "0.1.0" - description = "Add your description here" - readme = "README.md" - requires-python = ">=3.12" - dependencies = [] - - [build-system] - requires = ["uv_build[SPECIFIERS]"] - build-backend = "uv_build" - "# - ); - }); - - let init = fs_err::read_to_string(init_py)?; - insta::with_settings!({ - filters => context.filters(), - }, { - assert_snapshot!( - init, @r###" - def hello() -> str: - return "Hello from foo!" 
- "### - ); - }); - - let py_typed = fs_err::read_to_string(py_typed)?; - insta::with_settings!({ - filters => context.filters(), - }, { - assert_snapshot!( - py_typed, @"" - ); - }); - - uv_snapshot!(context.filters(), context.run().arg("--preview").current_dir(&child).arg("python").arg("-c").arg("import foo; print(foo.hello())"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - Hello from foo! - - ----- stderr ----- - warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead - Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - Creating virtual environment at: .venv - Resolved 1 package in [TIME] - Prepared 1 package in [TIME] - Installed 1 package in [TIME] - + foo==0.1.0 (from file://[TEMP_DIR]/foo) - "###); - - Ok(()) -} - /// Test the uv build backend with using `uv init --package --preview`. To be merged with the regular /// init lib test once the uv build backend becomes the stable default. 
#[test] @@ -550,10 +465,8 @@ fn init_package_preview() -> Result<()> { "###); let pyproject = fs_err::read_to_string(child.join("pyproject.toml"))?; - let mut filters = context.filters(); - filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#)); insta::with_settings!({ - filters => filters, + filters => context.filters(), }, { assert_snapshot!( pyproject, @r#" @@ -569,7 +482,7 @@ fn init_package_preview() -> Result<()> { foo = "foo:main" [build-system] - requires = ["uv_build[SPECIFIERS]"] + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] build-backend = "uv_build" "# ); @@ -615,7 +528,7 @@ fn init_bare_lib() { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -623,9 +536,9 @@ fn init_bare_lib() { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); } @@ -667,7 +580,7 @@ fn init_bare_package() { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -675,9 +588,9 @@ fn init_bare_package() { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); } @@ -1154,7 +1067,7 @@ fn init_library_current_dir() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -1164,9 +1077,9 @@ fn init_library_current_dir() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -1283,7 +1196,7 @@ fn init_dot_args() -> Result<()> { 
filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -1293,9 +1206,9 @@ fn init_dot_args() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -1361,7 +1274,7 @@ fn init_workspace() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -1371,9 +1284,9 @@ fn init_workspace() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -1546,7 +1459,7 @@ fn init_workspace_relative_sub_package() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -1556,9 +1469,9 @@ fn init_workspace_relative_sub_package() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -1643,7 +1556,7 @@ fn init_workspace_outside() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo" version = "0.1.0" @@ -1653,9 +1566,9 @@ fn init_workspace_outside() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -1725,7 +1638,7 @@ fn init_normalized_names() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject, @r###" + pyproject, @r#" [project] name = "foo-bar" 
version = "0.1.0" @@ -1735,9 +1648,9 @@ fn init_normalized_names() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - "### + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" + "# ); }); @@ -3008,8 +2921,8 @@ fn init_with_author() { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" "# ); }); @@ -3038,8 +2951,8 @@ fn init_with_author() { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" "# ); }); @@ -3822,9 +3735,9 @@ fn init_lib_build_backend_scikit() -> Result<()> { Ok(()) } -/// Run `uv init --app --package --build-backend uv` to create a packaged application project +/// Run `uv init --app --package --build-backend hatchling` to create a packaged application project #[test] -fn init_application_package_uv() -> Result<()> { +fn init_application_package_hatchling() -> Result<()> { let context = TestContext::new("3.12"); let child = context.temp_dir.child("foo"); @@ -3833,41 +3746,34 @@ fn init_application_package_uv() -> Result<()> { let pyproject_toml = child.join("pyproject.toml"); let init_py = child.join("src").join("foo").join("__init__.py"); - uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app").arg("--package").arg("--build-backend").arg("uv"), @r###" + uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app").arg("--package").arg("--build-backend").arg("hatchling"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - warning: The uv build backend is experimental and may change without warning Initialized project `foo` "###); let pyproject = fs_err::read_to_string(&pyproject_toml)?; - let mut filters = 
context.filters(); - filters.push((r#"\["uv_build>=.*,<.*"\]"#, r#"["uv_build[SPECIFIERS]"]"#)); - insta::with_settings!({ - filters => filters, - }, { - assert_snapshot!( - pyproject, @r###" - [project] - name = "foo" - version = "0.1.0" - description = "Add your description here" - readme = "README.md" - requires-python = ">=3.12" - dependencies = [] + assert_snapshot!( + pyproject, @r#" + [project] + name = "foo" + version = "0.1.0" + description = "Add your description here" + readme = "README.md" + requires-python = ">=3.12" + dependencies = [] - [project.scripts] - foo = "foo:main" + [project.scripts] + foo = "foo:main" - [build-system] - requires = ["uv_build[SPECIFIERS]"] - build-backend = "uv_build" - "### - ); - }); + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "# + ); let init = fs_err::read_to_string(init_py)?; insta::with_settings!({ @@ -3881,8 +3787,7 @@ fn init_application_package_uv() -> Result<()> { ); }); - // Use preview to go through the fast path. 
- uv_snapshot!(context.filters(), context.run().arg("--preview").arg("foo").current_dir(&child).env_remove(EnvVars::VIRTUAL_ENV), @r###" + uv_snapshot!(context.filters(), context.run().arg("foo").current_dir(&child).env_remove(EnvVars::VIRTUAL_ENV), @r###" success: true exit_code: 0 ----- stdout ----- @@ -3935,8 +3840,8 @@ fn init_with_description() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" "# ); }); @@ -3977,8 +3882,8 @@ fn init_without_description() -> Result<()> { dependencies = [] [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" + requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"] + build-backend = "uv_build" "# ); }); diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md index 5f52463bf..d2edf1bad 100644 --- a/docs/concepts/build-backend.md +++ b/docs/concepts/build-backend.md @@ -1,10 +1,5 @@ # The uv build backend -!!! note - - Currently, the default build backend for `uv init` is - [hatchling](https://pypi.org/project/hatchling/). This will change to `uv` in a future version. - A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel. 
uv supports all build backends (as specified by [PEP 517](https://peps.python.org/pep-0517/)), but @@ -49,7 +44,7 @@ build-backend = "uv_build" To create a new project that uses the uv build backend, use `uv init`: ```console -$ uv init --build-backend uv +$ uv init ``` When the project is built, e.g., with [`uv build`](../guides/package.md), the uv build backend will From 25e69458b1f6bf8aa937bb0d83b660e8d5c088e7 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 14:26:42 -0500 Subject: [PATCH 070/130] Stabilize addition of Python versions to the Windows registry (#14625) Following #14614 this is non-fatal and has an opt-out so it should be safe to stabilize. --- crates/uv/src/commands/python/install.rs | 2 +- crates/uv/src/commands/python/uninstall.rs | 2 +- docs/concepts/python-versions.md | 15 +++++++++++++++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index feb0cf7c7..b9d4660df 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -501,7 +501,7 @@ pub(crate) async fn install( ); } - if preview.is_enabled() && !matches!(registry, Some(false)) { + if !matches!(registry, Some(false)) { #[cfg(windows)] { match uv_python::windows_registry::create_registry_entry(installation) { diff --git a/crates/uv/src/commands/python/uninstall.rs b/crates/uv/src/commands/python/uninstall.rs index 642942d07..dd306fc4d 100644 --- a/crates/uv/src/commands/python/uninstall.rs +++ b/crates/uv/src/commands/python/uninstall.rs @@ -211,7 +211,7 @@ async fn do_uninstall( } #[cfg(windows)] - if preview.is_enabled() { + { uv_python::windows_registry::remove_registry_entry( &matching_installations, all, diff --git a/docs/concepts/python-versions.md b/docs/concepts/python-versions.md index a7472bea8..ee18fa9da 100644 --- a/docs/concepts/python-versions.md +++ b/docs/concepts/python-versions.md @@ -435,3 +435,18 @@ are not yet 
available for musl Linux on ARM). ### PyPy distributions PyPy distributions are provided by the PyPy project. + +## Registration in the Windows registry + +On Windows, installation of managed Python versions will register them with the Windows registry as +defined by [PEP 514](https://peps.python.org/pep-0514/). + +After installation, the Python versions can be selected with the `py` launcher, e.g.: + +```console +$ uv python install 3.13.1 +$ py -V:Astral/CPython3.13.1 +``` + +On uninstall, uv will remove the registry entry for the target version as well as any broken +registry entries. From 2df06ebfbc2d4df90d47ef9a9e631ba9926712e0 Mon Sep 17 00:00:00 2001 From: John Mumm Date: Wed, 16 Jul 2025 21:25:48 +0200 Subject: [PATCH 071/130] Require `uv venv --clear` before removing an existing directory (#14309) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit By default, `uv venv <path>` currently removes the `<path>` directory if it exists. This can be surprising behavior: not everyone expects an existing environment to be overwritten. This PR updates the default to fail if a non-empty `<path>` directory already exists and neither `--allow-existing` nor the new `-c/--clear` option is provided (if a TTY is detected, it prompts first). If it's not a TTY, then uv will only warn and not fail for now — we'll make this an error in the future. I've also added a corresponding `UV_VENV_CLEAR` env var. I've chosen to use `--clear` instead of `--force` for this option because it is used by the `venv` module and `virtualenv` and will be familiar to users. I also think its meaning is clearer in this context than `--force` (which could plausibly mean force overwrite just the virtual environment files, which is what our current `--allow-existing` option does). Closes #1472.
--------- Co-authored-by: Zanie Blue --- .github/workflows/ci.yml | 10 +- Cargo.lock | 4 + crates/uv-build-frontend/src/lib.rs | 2 +- crates/uv-cli/src/compat.rs | 9 - crates/uv-cli/src/lib.rs | 13 +- crates/uv-console/src/lib.rs | 32 +++- crates/uv-static/src/env_vars.rs | 4 + crates/uv-tool/src/lib.rs | 2 +- crates/uv-virtualenv/Cargo.toml | 4 + crates/uv-virtualenv/src/lib.rs | 6 +- crates/uv-virtualenv/src/virtualenv.rs | 169 ++++++++++++++---- crates/uv/src/commands/project/environment.rs | 16 +- crates/uv/src/commands/project/mod.rs | 8 +- crates/uv/src/commands/project/run.rs | 8 +- crates/uv/src/commands/venv.rs | 5 +- crates/uv/src/lib.rs | 4 +- crates/uv/src/settings.rs | 3 + crates/uv/tests/it/cache_prune.rs | 2 +- crates/uv/tests/it/common/mod.rs | 1 + crates/uv/tests/it/pip_install.rs | 7 +- crates/uv/tests/it/pip_sync.rs | 2 +- crates/uv/tests/it/sync.rs | 1 + crates/uv/tests/it/venv.rs | 94 +++++----- docs/reference/cli.md | 6 +- docs/reference/environment.md | 5 + 25 files changed, 282 insertions(+), 135 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e9beddcc5..4fb67346e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1042,7 +1042,7 @@ jobs: - name: "Create a virtual environment (uv)" run: | - ./uv venv -p 3.13t --managed-python + ./uv venv -c -p 3.13t --managed-python - name: "Check version (uv)" run: | @@ -1087,7 +1087,7 @@ jobs: - name: "Create a virtual environment (uv)" run: | - ./uv venv -p 3.13 --managed-python + ./uv venv -c -p 3.13 --managed-python - name: "Check version (uv)" run: | @@ -1132,7 +1132,7 @@ jobs: - name: "Create a virtual environment (uv)" run: | - ./uv venv -p 3.13 --managed-python + ./uv venv -c -p 3.13 --managed-python - name: "Check version (uv)" run: | @@ -1758,14 +1758,14 @@ jobs: ./uv run --no-project python -c "from built_by_uv import greet; print(greet())" # Test both `build_wheel` and `build_sdist` through uv - ./uv venv -v + ./uv venv -c -v ./uv build -v 
--force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps ./uv run --no-project python -c "from built_by_uv import greet; print(greet())" # Test both `build_wheel` and `build_sdist` through the official `build` rm -rf scripts/packages/built-by-uv/dist/ - ./uv venv -v + ./uv venv -c -v ./uv pip install build # Add the uv binary to PATH for `build` to find PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv diff --git a/Cargo.lock b/Cargo.lock index 2963b6374..0900699cb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5998,18 +5998,22 @@ version = "0.7.22" name = "uv-virtualenv" version = "0.0.4" dependencies = [ + "console 0.15.11", "fs-err 3.1.1", "itertools 0.14.0", + "owo-colors", "pathdiff", "self-replace", "thiserror 2.0.12", "tracing", "uv-configuration", + "uv-console", "uv-fs", "uv-pypi-types", "uv-python", "uv-shell", "uv-version", + "uv-warnings", ] [[package]] diff --git a/crates/uv-build-frontend/src/lib.rs b/crates/uv-build-frontend/src/lib.rs index 5cbaece2e..67bee9619 100644 --- a/crates/uv-build-frontend/src/lib.rs +++ b/crates/uv-build-frontend/src/lib.rs @@ -331,7 +331,7 @@ impl SourceBuild { interpreter.clone(), uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, false, diff --git a/crates/uv-cli/src/compat.rs b/crates/uv-cli/src/compat.rs index d29afa760..344d1a4e7 100644 --- a/crates/uv-cli/src/compat.rs +++ b/crates/uv-cli/src/compat.rs @@ -266,9 +266,6 @@ enum Resolver { /// These represent a subset of the `virtualenv` interface that uv supports by default. #[derive(Args)] pub struct VenvCompatArgs { - #[clap(long, hide = true)] - clear: bool, - #[clap(long, hide = true)] no_seed: bool, @@ -289,12 +286,6 @@ impl CompatArgs for VenvCompatArgs { /// behavior. 
If an argument is passed that does _not_ match uv's behavior, this method will /// return an error. fn validate(&self) -> Result<()> { - if self.clear { - warn_user!( - "virtualenv's `--clear` has no effect (uv always clears the virtual environment)" - ); - } - if self.no_seed { warn_user!( "virtualenv's `--no-seed` has no effect (uv omits seed packages by default)" diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 4c01fd780..5df818654 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -2615,16 +2615,23 @@ pub struct VenvArgs { #[arg(long, value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_SEED)] pub seed: bool, + /// Remove any existing files or directories at the target path. + /// + /// By default, `uv venv` will exit with an error if the given path is non-empty. The + /// `--clear` option will instead clear a non-empty path before creating a new virtual + /// environment. + #[clap(long, short, overrides_with = "allow_existing", value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_CLEAR)] + pub clear: bool, + /// Preserve any existing files or directories at the target path. /// - /// By default, `uv venv` will remove an existing virtual environment at the given path, and - /// exit with an error if the path is non-empty but _not_ a virtual environment. The + /// By default, `uv venv` will exit with an error if the given path is non-empty. The /// `--allow-existing` option will instead write to the given path, regardless of its contents, /// and without clearing it beforehand. /// /// WARNING: This option can lead to unexpected behavior if the existing virtual environment and /// the newly-created virtual environment are linked to different Python interpreters. - #[clap(long)] + #[clap(long, overrides_with = "clear")] pub allow_existing: bool, /// The path to the virtual environment to create. 
diff --git a/crates/uv-console/src/lib.rs b/crates/uv-console/src/lib.rs index 807b77aa4..24c5eea16 100644 --- a/crates/uv-console/src/lib.rs +++ b/crates/uv-console/src/lib.rs @@ -6,6 +6,25 @@ use std::{cmp::Ordering, iter}; /// This is a slimmed-down version of `dialoguer::Confirm`, with the post-confirmation report /// enabled. pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result { + confirm_inner(message, None, term, default) +} + +/// Prompt the user for confirmation in the given [`Term`], with a hint. +pub fn confirm_with_hint( + message: &str, + hint: &str, + term: &Term, + default: bool, +) -> std::io::Result { + confirm_inner(message, Some(hint), term, default) +} + +fn confirm_inner( + message: &str, + hint: Option<&str>, + term: &Term, + default: bool, +) -> std::io::Result { let prompt = format!( "{} {} {} {} {}", style("?".to_string()).for_stderr().yellow(), @@ -18,6 +37,13 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result std::io::Result { - if metadata.is_file() { - return Err(Error::Io(io::Error::new( - io::ErrorKind::AlreadyExists, - format!("File exists at `{}`", location.user_display()), - ))); - } else if metadata.is_dir() { - if allow_existing { - debug!("Allowing existing directory"); - } else if uv_fs::is_virtualenv_base(location) { - debug!("Removing existing directory"); - - // On Windows, if the current executable is in the directory, guard against - // self-deletion. 
- #[cfg(windows)] - if let Ok(itself) = std::env::current_exe() { - let target = std::path::absolute(location)?; - if itself.starts_with(&target) { - debug!("Detected self-delete of executable: {}", itself.display()); - self_replace::self_delete_outside_path(location)?; - } - } - - fs::remove_dir_all(location)?; - fs::create_dir_all(location)?; - } else if location - .read_dir() - .is_ok_and(|mut dir| dir.next().is_none()) + Ok(metadata) if metadata.is_file() => { + return Err(Error::Io(io::Error::new( + io::ErrorKind::AlreadyExists, + format!("File exists at `{}`", location.user_display()), + ))); + } + Ok(metadata) if metadata.is_dir() => { + let name = if uv_fs::is_virtualenv_base(location) { + "virtual environment" + } else { + "directory" + }; + match on_existing { + OnExisting::Allow => { + debug!("Allowing existing {name} due to `--allow-existing`"); + } + OnExisting::Remove => { + debug!("Removing existing {name} due to `--clear`"); + remove_venv_directory(location)?; + } + OnExisting::Fail + if location + .read_dir() + .is_ok_and(|mut dir| dir.next().is_none()) => { debug!("Ignoring empty directory"); - } else { - return Err(Error::Io(io::Error::new( - io::ErrorKind::AlreadyExists, - format!( - "The directory `{}` exists, but it's not a virtual environment", - location.user_display() - ), - ))); + } + OnExisting::Fail => { + match confirm_clear(location, name)? 
{ + Some(true) => { + debug!("Removing existing {name} due to confirmation"); + remove_venv_directory(location)?; + } + Some(false) => { + let hint = format!( + "Use the `{}` flag or set `{}` to replace the existing {name}", + "--clear".green(), + "UV_VENV_CLEAR=1".green() + ); + return Err(Error::Io(io::Error::new( + io::ErrorKind::AlreadyExists, + format!( + "A {name} already exists at: {}\n\n{}{} {hint}", + location.user_display(), + "hint".bold().cyan(), + ":".bold(), + ), + ))); + } + // When we don't have a TTY, warn that the behavior will change in the future + None => { + warn_user_once!( + "A {name} already exists at `{}`. In the future, uv will require `{}` to replace it", + location.user_display(), + "--clear".green(), + ); + } + } } } } + Ok(_) => { + // It's not a file or a directory + return Err(Error::Io(io::Error::new( + io::ErrorKind::AlreadyExists, + format!("Object already exists at `{}`", location.user_display()), + ))); + } Err(err) if err.kind() == io::ErrorKind::NotFound => { fs::create_dir_all(location)?; } @@ -464,6 +494,71 @@ pub(crate) fn create( }) } +/// Prompt a confirmation that the virtual environment should be cleared. +/// +/// If not a TTY, returns `None`. +fn confirm_clear(location: &Path, name: &'static str) -> Result, io::Error> { + let term = Term::stderr(); + if term.is_term() { + let prompt = format!( + "A {name} already exists at `{}`. Do you want to replace it?", + location.user_display(), + ); + let hint = format!( + "Use the `{}` flag or set `{}` to skip this prompt", + "--clear".green(), + "UV_VENV_CLEAR=1".green() + ); + Ok(Some(uv_console::confirm_with_hint( + &prompt, &hint, &term, true, + )?)) + } else { + Ok(None) + } +} + +fn remove_venv_directory(location: &Path) -> Result<(), Error> { + // On Windows, if the current executable is in the directory, guard against + // self-deletion. 
+ #[cfg(windows)] + if let Ok(itself) = std::env::current_exe() { + let target = std::path::absolute(location)?; + if itself.starts_with(&target) { + debug!("Detected self-delete of executable: {}", itself.display()); + self_replace::self_delete_outside_path(location)?; + } + } + + fs::remove_dir_all(location)?; + fs::create_dir_all(location)?; + + Ok(()) +} + +#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] +pub enum OnExisting { + /// Fail if the directory already exists and is non-empty. + #[default] + Fail, + /// Allow an existing directory, overwriting virtual environment files while retaining other + /// files in the directory. + Allow, + /// Remove an existing directory. + Remove, +} + +impl OnExisting { + pub fn from_args(allow_existing: bool, clear: bool) -> Self { + if allow_existing { + OnExisting::Allow + } else if clear { + OnExisting::Remove + } else { + OnExisting::default() + } + } +} + #[derive(Debug, Copy, Clone)] enum WindowsExecutable { /// The `python.exe` executable (or `venvlauncher.exe` launcher shim). 
diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index cf1add99a..4f9d936c5 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -2,13 +2,6 @@ use std::path::Path; use tracing::debug; -use uv_cache::{Cache, CacheBucket}; -use uv_cache_key::{cache_digest, hash_digest}; -use uv_configuration::{Concurrency, Constraints, PreviewMode}; -use uv_distribution_types::{Name, Resolution}; -use uv_fs::PythonExt; -use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable}; - use crate::commands::pip::loggers::{InstallLogger, ResolveLogger}; use crate::commands::pip::operations::Modifications; use crate::commands::project::{ @@ -17,6 +10,13 @@ use crate::commands::project::{ use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; +use uv_cache::{Cache, CacheBucket}; +use uv_cache_key::{cache_digest, hash_digest}; +use uv_configuration::{Concurrency, Constraints, PreviewMode}; +use uv_distribution_types::{Name, Resolution}; +use uv_fs::PythonExt; +use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable}; + /// An ephemeral [`PythonEnvironment`] for running an individual command. 
#[derive(Debug)] pub(crate) struct EphemeralEnvironment(PythonEnvironment); @@ -171,7 +171,7 @@ impl CachedEnvironment { interpreter, uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, true, false, false, diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index fde2b638c..23655c1ca 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -1336,7 +1336,7 @@ impl ProjectEnvironment { interpreter, prompt, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, upgradeable, @@ -1375,7 +1375,7 @@ impl ProjectEnvironment { interpreter, prompt, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, upgradeable, @@ -1527,7 +1527,7 @@ impl ScriptEnvironment { interpreter, prompt, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, upgradeable, @@ -1563,7 +1563,7 @@ impl ScriptEnvironment { interpreter, prompt, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, upgradeable, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 16ebf88fb..ba8935013 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -465,7 +465,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl interpreter, uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, false, @@ -670,7 +670,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl interpreter, uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, false, @@ -907,7 +907,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl interpreter, uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, false, @@ -1038,7 +1038,7 @@ hint: If you are running a script with `{}` in the shebang, you may 
need to incl base_interpreter.clone(), uv_virtualenv::Prompt::None, false, - false, + uv_virtualenv::OnExisting::Remove, false, false, false, diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index 02bc818f8..92eb1ead7 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -27,6 +27,7 @@ use uv_resolver::{ExcludeNewer, FlatIndex}; use uv_settings::PythonInstallMirrors; use uv_shell::{Shell, shlex_posix, shlex_windows}; use uv_types::{AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, HashStrategy}; +use uv_virtualenv::OnExisting; use uv_warnings::warn_user; use uv_workspace::{DiscoveryOptions, VirtualProject, WorkspaceCache, WorkspaceError}; @@ -73,7 +74,7 @@ pub(crate) async fn venv( prompt: uv_virtualenv::Prompt, system_site_packages: bool, seed: bool, - allow_existing: bool, + on_existing: OnExisting, exclude_newer: Option, concurrency: Concurrency, no_config: bool, @@ -209,7 +210,7 @@ pub(crate) async fn venv( interpreter, prompt, system_site_packages, - allow_existing, + on_existing, relocatable, seed, upgradeable, diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 433f5afd3..9c9b41065 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1032,6 +1032,8 @@ async fn run(mut cli: Cli) -> Result { let python_request: Option = args.settings.python.as_deref().map(PythonRequest::parse); + let on_existing = uv_virtualenv::OnExisting::from_args(args.allow_existing, args.clear); + commands::venv( &project_dir, args.path, @@ -1048,7 +1050,7 @@ async fn run(mut cli: Cli) -> Result { uv_virtualenv::Prompt::from_args(prompt), args.system_site_packages, args.seed, - args.allow_existing, + on_existing, args.settings.exclude_newer, globals.concurrency, cli.top_level.no_config, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index bf3bca4a4..1ebeecba8 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -2623,6 +2623,7 @@ impl BuildSettings { pub(crate) 
struct VenvSettings { pub(crate) seed: bool, pub(crate) allow_existing: bool, + pub(crate) clear: bool, pub(crate) path: Option, pub(crate) prompt: Option, pub(crate) system_site_packages: bool, @@ -2641,6 +2642,7 @@ impl VenvSettings { no_system, seed, allow_existing, + clear, path, prompt, system_site_packages, @@ -2658,6 +2660,7 @@ impl VenvSettings { Self { seed, allow_existing, + clear, path, prompt, system_site_packages, diff --git a/crates/uv/tests/it/cache_prune.rs b/crates/uv/tests/it/cache_prune.rs index a6ec48bd4..99493fe21 100644 --- a/crates/uv/tests/it/cache_prune.rs +++ b/crates/uv/tests/it/cache_prune.rs @@ -227,7 +227,7 @@ fn prune_unzipped() -> Result<()> { Removed [N] files ([SIZE]) "###); - context.venv().assert().success(); + context.venv().arg("--clear").assert().success(); // Reinstalling the source distribution should not require re-downloading the source // distribution. diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index bc6e65f4e..9d3c1428f 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -1415,6 +1415,7 @@ pub fn create_venv_from_executable>(path: P, cache_dir: &ChildPat assert_cmd::Command::new(get_bin()) .arg("venv") .arg(path.as_ref().as_os_str()) + .arg("--clear") .arg("--cache-dir") .arg(cache_dir.path()) .arg("--python") diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index 123d9066b..9cd394bbd 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -2859,7 +2859,7 @@ fn install_no_binary_cache() { ); // Re-create the virtual environment. - context.venv().assert().success(); + context.venv().arg("--clear").assert().success(); // Re-install. The distribution should be installed from the cache. uv_snapshot!( @@ -2877,7 +2877,7 @@ fn install_no_binary_cache() { ); // Re-create the virtual environment. 
- context.venv().assert().success(); + context.venv().arg("--clear").assert().success(); // Install with `--no-binary`. The distribution should be built from source, despite a binary // distribution being available in the cache. @@ -3088,7 +3088,7 @@ fn cache_priority() { ); // Re-create the virtual environment. - context.venv().assert().success(); + context.venv().arg("--clear").assert().success(); // Install `idna` without a version specifier. uv_snapshot!( @@ -8252,6 +8252,7 @@ fn install_relocatable() -> Result<()> { context .venv() .arg(context.venv.as_os_str()) + .arg("--clear") .arg("--python") .arg("3.12") .arg("--relocatable") diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs index 537c5dff2..4b249be8c 100644 --- a/crates/uv/tests/it/pip_sync.rs +++ b/crates/uv/tests/it/pip_sync.rs @@ -5625,7 +5625,7 @@ fn sync_seed() -> Result<()> { ); // Re-create the environment with seed packages. - uv_snapshot!(context.filters(), context.venv() + uv_snapshot!(context.filters(), context.venv().arg("--clear") .arg("--seed"), @r" success: true exit_code: 0 diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index c225225b8..35a06ea57 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -9987,6 +9987,7 @@ fn sync_when_virtual_environment_incompatible_with_interpreter() -> Result<()> { context .venv() .arg(context.venv.as_os_str()) + .arg("--clear") .arg("--python") .arg("3.12") .assert() diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs index 43cacb640..2430e607d 100644 --- a/crates/uv/tests/it/venv.rs +++ b/crates/uv/tests/it/venv.rs @@ -30,10 +30,28 @@ fn create_venv() { context.venv.assert(predicates::path::is_dir()); - // Create a virtual environment at the same location, which should replace it. 
uv_snapshot!(context.filters(), context.venv() .arg(context.venv.as_os_str()) .arg("--python") + .arg("3.12"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate + " + ); + + // Create a virtual environment at the same location using `--clear`, + // which should replace it. + uv_snapshot!(context.filters(), context.venv() + .arg(context.venv.as_os_str()) + .arg("--clear") + .arg("--python") .arg("3.12"), @r###" success: true exit_code: 0 @@ -162,7 +180,7 @@ fn create_venv_project_environment() -> Result<()> { .assert(predicates::path::is_dir()); // Or, of they opt-out with `--no-workspace` or `--no-project` - uv_snapshot!(context.filters(), context.venv().arg("--no-workspace"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--no-workspace"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -174,7 +192,7 @@ fn create_venv_project_environment() -> Result<()> { "### ); - uv_snapshot!(context.filters(), context.venv().arg("--no-project"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--no-project"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -252,7 +270,7 @@ fn create_venv_reads_request_from_python_version_file() { .write_str("3.12") .unwrap(); - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -291,7 +309,7 @@ fn create_venv_reads_request_from_python_versions_file() { .write_str("3.12\n3.11") .unwrap(); - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -334,7 
+352,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -357,7 +375,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -380,7 +398,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -414,7 +432,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -437,7 +455,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -460,7 +478,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -475,7 +493,7 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { context.venv.assert(predicates::path::is_dir()); // We warn if we receive an incompatible version - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r" success: true 
exit_code: 0 ----- stdout ----- @@ -527,7 +545,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r" success: true exit_code: 0 ----- stdout ----- @@ -560,7 +578,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r" success: true exit_code: 0 ----- stdout ----- @@ -593,7 +611,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv(), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear"), @r" success: true exit_code: 0 ----- stdout ----- @@ -621,7 +639,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r" success: true exit_code: 0 ----- stdout ----- @@ -654,7 +672,7 @@ fn create_venv_respects_group_requires_python() -> Result<()> { "# })?; - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" + uv_snapshot!(context.filters(), context.venv().arg("--clear").arg("--python").arg("3.11"), @r" success: false exit_code: 2 ----- stdout ----- @@ -945,15 +963,15 @@ fn non_empty_dir_exists() -> Result<()> { .arg(context.venv.as_os_str()) .arg("--python") .arg("3.12"), @r" - success: false - exit_code: 2 + success: true + exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv - error: Failed to create virtual environment - Caused by: The directory `.venv` exists, but it's not a virtual environment + warning: A directory already exists at `.venv`. 
In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate " ); @@ -973,15 +991,15 @@ fn non_empty_dir_exists_allow_existing() -> Result<()> { .arg(context.venv.as_os_str()) .arg("--python") .arg("3.12"), @r" - success: false - exit_code: 2 + success: true + exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv - error: Failed to create virtual environment - Caused by: The directory `.venv` exists, but it's not a virtual environment + warning: A directory already exists at `.venv`. In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate " ); @@ -1102,31 +1120,6 @@ fn windows_shims() -> Result<()> { Ok(()) } -#[test] -fn virtualenv_compatibility() { - let context = TestContext::new_with_versions(&["3.12"]); - - // Create a virtual environment at `.venv`, passing the redundant `--clear` flag. - uv_snapshot!(context.filters(), context.venv() - .arg(context.venv.as_os_str()) - .arg("--clear") - .arg("--python") - .arg("3.12"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - warning: virtualenv's `--clear` has no effect (uv always clears the virtual environment) - Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - Creating virtual environment at: .venv - Activate with: source .venv/[BIN]/activate - "### - ); - - context.venv.assert(predicates::path::is_dir()); -} - #[test] fn verify_pyvenv_cfg() { let context = TestContext::new("3.12"); @@ -1154,6 +1147,7 @@ fn verify_pyvenv_cfg_relocatable() { context .venv() .arg(context.venv.as_os_str()) + .arg("--clear") .arg("--python") .arg("3.12") .arg("--relocatable") diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 881c96697..9be647449 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -4683,7 +4683,7 @@ uv venv [OPTIONS] [PATH]

    Options

    --allow-existing

    Preserve any existing files or directories at the target path.

    -

    By default, uv venv will remove an existing virtual environment at the given path, and exit with an error if the path is non-empty but not a virtual environment. The --allow-existing option will instead write to the given path, regardless of its contents, and without clearing it beforehand.

    +

    By default, uv venv will exit with an error if the given path is non-empty. The --allow-existing option will instead write to the given path, regardless of its contents, and without clearing it beforehand.

    WARNING: This option can lead to unexpected behavior if the existing virtual environment and the newly-created virtual environment are linked to different Python interpreters.

    --allow-insecure-host, --trusted-host allow-insecure-host

    Allow insecure connections to a host.

    Can be provided multiple times.

    @@ -4692,7 +4692,9 @@ uv venv [OPTIONS] [PATH]

    May also be set with the UV_INSECURE_HOST environment variable.

    --cache-dir cache-dir

    Path to the cache directory.

    Defaults to $XDG_CACHE_HOME/uv or $HOME/.cache/uv on macOS and Linux, and %LOCALAPPDATA%\uv\cache on Windows.

    To view the location of the cache directory, run uv cache dir.

    -

    May also be set with the UV_CACHE_DIR environment variable.

    --color color-choice

    Control the use of color in output.

    +

    May also be set with the UV_CACHE_DIR environment variable.

    --clear, -c

    Remove any existing files or directories at the target path.

    +

    By default, uv venv will exit with an error if the given path is non-empty. The --clear option will instead clear a non-empty path before creating a new virtual environment.

    +

    May also be set with the UV_VENV_CLEAR environment variable.

    --color color-choice

    Control the use of color in output.

    By default, uv will automatically detect support for colors when writing to a terminal.

    Possible values:

      diff --git a/docs/reference/environment.md b/docs/reference/environment.md index a64869edb..e848d4a41 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -458,6 +458,11 @@ Equivalent to the `--torch-backend` command-line argument (e.g., `cpu`, `cu126`, Used ephemeral environments like CI to install uv to a specific path while preventing the installer from modifying shell profiles or environment variables. +### `UV_VENV_CLEAR` + +Equivalent to the `--clear` command-line argument. If set, uv will remove any +existing files or directories at the target path. + ### `UV_VENV_SEED` Install seed packages (one or more of: `pip`, `setuptools`, and `wheel`) into the virtual environment From b98ac8c224f651a61ee3c44f6829d70cde80b3a9 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Wed, 16 Jul 2025 15:31:47 -0500 Subject: [PATCH 072/130] Validate that discovered interpreters meet the Python preference (#7934) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/astral-sh/uv/issues/5144 e.g. 
``` ❯ cargo run -q -- sync --python-preference only-system Using CPython 3.12.6 interpreter at: /opt/homebrew/opt/python@3.12/bin/python3.12 Removed virtual environment at: .venv Creating virtual environment at: .venv Resolved 9 packages in 14ms Installed 8 packages in 9ms + anyio==4.6.0 + certifi==2024.8.30 + h11==0.14.0 + httpcore==1.0.5 + httpx==0.27.2 + idna==3.10 + ruff==0.6.7 + sniffio==1.3.1 ❯ cargo run -q -- sync --python-preference only-managed Using CPython 3.12.1 Removed virtual environment at: .venv Creating virtual environment at: .venv Resolved 9 packages in 14ms Installed 8 packages in 11ms + anyio==4.6.0 + certifi==2024.8.30 + h11==0.14.0 + httpcore==1.0.5 + httpx==0.27.2 + idna==3.10 + ruff==0.6.7 + sniffio==1.3.1 ``` --- crates/uv-python/src/discovery.rs | 113 ++++++++++++++++++++- crates/uv-python/src/environment.rs | 3 +- crates/uv-python/src/interpreter.rs | 23 ++++- crates/uv-python/src/lib.rs | 2 +- crates/uv-static/src/env_vars.rs | 8 ++ crates/uv/src/commands/project/mod.rs | 26 ++++- crates/uv/tests/it/common/mod.rs | 21 ++++ crates/uv/tests/it/pip_install.rs | 55 ++++++++++ crates/uv/tests/it/python_find.rs | 51 ++++++++++ crates/uv/tests/it/run.rs | 46 +++++++++ crates/uv/tests/it/sync.rs | 141 ++++++++++++++++++++++++++ crates/uv/tests/it/venv.rs | 66 ++++++++++++ 12 files changed, 544 insertions(+), 11 deletions(-) diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs index c067082dd..f10b480e2 100644 --- a/crates/uv-python/src/discovery.rs +++ b/crates/uv-python/src/discovery.rs @@ -446,7 +446,16 @@ fn python_executables_from_installed<'a>( .flatten(); match preference { - PythonPreference::OnlyManaged => Box::new(from_managed_installations), + PythonPreference::OnlyManaged => { + // TODO(zanieb): Ideally, we'd create "fake" managed installation directories for tests, + // but for now... we'll just include the test interpreters which are always on the + // search path. 
+ if std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED).is_ok() { + Box::new(from_managed_installations.chain(from_search_path)) + } else { + Box::new(from_managed_installations) + } + } PythonPreference::Managed => Box::new( from_managed_installations .chain(from_search_path) @@ -730,6 +739,9 @@ fn python_interpreters<'a>( false } }) + .filter_ok(move |(source, interpreter)| { + satisfies_python_preference(*source, interpreter, preference) + }) } /// Lazily convert Python executables into interpreters. @@ -857,6 +869,93 @@ fn source_satisfies_environment_preference( } } +/// Returns true if a Python interpreter matches the [`PythonPreference`]. +pub fn satisfies_python_preference( + source: PythonSource, + interpreter: &Interpreter, + preference: PythonPreference, +) -> bool { + // If the source is "explicit", we will not apply the Python preference, e.g., if the user has + // activated a virtual environment, we should always allow it. We may want to invalidate the + // environment in some cases, like in projects, but we can't distinguish between explicit + // requests for a different Python preference or a persistent preference in a configuration file + // which would result in overly aggressive invalidation. 
+ let is_explicit = match source { + PythonSource::ProvidedPath + | PythonSource::ParentInterpreter + | PythonSource::ActiveEnvironment + | PythonSource::CondaPrefix => true, + PythonSource::Managed + | PythonSource::DiscoveredEnvironment + | PythonSource::SearchPath + | PythonSource::SearchPathFirst + | PythonSource::Registry + | PythonSource::MicrosoftStore + | PythonSource::BaseCondaPrefix => false, + }; + + match preference { + PythonPreference::OnlyManaged => { + // Perform a fast check using the source before querying the interpreter + if matches!(source, PythonSource::Managed) || interpreter.is_managed() { + true + } else { + if is_explicit { + debug!( + "Allowing unmanaged Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}", + interpreter.sys_executable().display() + ); + true + } else { + debug!( + "Ignoring Python interpreter at `{}`: only managed interpreters allowed", + interpreter.sys_executable().display() + ); + false + } + } + } + // If not "only" a kind, any interpreter is okay + PythonPreference::Managed | PythonPreference::System => true, + PythonPreference::OnlySystem => { + let is_system = match source { + // A managed interpreter is never a system interpreter + PythonSource::Managed => false, + // We can't be sure if this is a system interpreter without checking + PythonSource::ProvidedPath + | PythonSource::ParentInterpreter + | PythonSource::ActiveEnvironment + | PythonSource::CondaPrefix + | PythonSource::DiscoveredEnvironment + | PythonSource::SearchPath + | PythonSource::SearchPathFirst + | PythonSource::Registry + | PythonSource::BaseCondaPrefix => !interpreter.is_managed(), + // Managed interpreters should never be found in the store + PythonSource::MicrosoftStore => true, + }; + + if is_system { + true + } else { + if is_explicit { + debug!( + "Allowing managed Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}", + 
interpreter.sys_executable().display() + ); + true + } else { + debug!( + "Ignoring Python interpreter at `{}`: only system interpreters allowed", + interpreter.sys_executable().display() + ); + false + } + } + } + } +} + /// Check if an encountered error is critical and should stop discovery. /// /// Returns false when an error could be due to a faulty Python installation and we should continue searching for a working one. @@ -2812,6 +2911,18 @@ impl PythonPreference { } } } + + /// Return the canonical name. + // TODO(zanieb): This should be a `Display` impl and we should have a different view for + // the sources + pub fn canonical_name(&self) -> &'static str { + match self { + Self::OnlyManaged => "only managed", + Self::Managed => "prefer managed", + Self::System => "prefer system", + Self::OnlySystem => "only system", + } + } } impl fmt::Display for PythonPreference { diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs index 07f3ddb54..10cec16ad 100644 --- a/crates/uv-python/src/environment.rs +++ b/crates/uv-python/src/environment.rs @@ -158,8 +158,7 @@ impl PythonEnvironment { let installation = match find_python_installation( request, preference, - // Ignore managed installations when looking for environments - PythonPreference::OnlySystem, + PythonPreference::default(), cache, preview, )? { diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs index fc5adb833..dd9dd1cb4 100644 --- a/crates/uv-python/src/interpreter.rs +++ b/crates/uv-python/src/interpreter.rs @@ -271,15 +271,28 @@ impl Interpreter { /// /// Returns `false` if we cannot determine the path of the uv managed Python interpreters. pub fn is_managed(&self) -> bool { + if let Ok(test_managed) = + std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED) + { + // During testing, we collect interpreters into an artificial search path and need to + // be able to mock whether an interpreter is managed or not. 
+ return test_managed.split_ascii_whitespace().any(|item| { + let version = ::from_str(item).expect( + "`UV_INTERNAL__TEST_PYTHON_MANAGED` items should be valid Python versions", + ); + if version.patch().is_some() { + version.version() == self.python_version() + } else { + (version.major(), version.minor()) == self.python_tuple() + } + }); + } + let Ok(installations) = ManagedPythonInstallations::from_settings(None) else { return false; }; - installations - .find_all() - .into_iter() - .flatten() - .any(|install| install.path() == self.sys_base_prefix) + self.sys_base_prefix.starts_with(installations.root()) } /// Returns `Some` if the environment is externally managed, optionally including an error diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs index ea6f0db61..2461f9006 100644 --- a/crates/uv-python/src/lib.rs +++ b/crates/uv-python/src/lib.rs @@ -8,7 +8,7 @@ use uv_static::EnvVars; pub use crate::discovery::{ EnvironmentPreference, Error as DiscoveryError, PythonDownloads, PythonNotFound, PythonPreference, PythonRequest, PythonSource, PythonVariant, VersionRequest, - find_python_installations, + find_python_installations, satisfies_python_preference, }; pub use crate::downloads::PlatformRequest; pub use crate::environment::{InvalidEnvironmentKind, PythonEnvironment}; diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index a99808468..f7fa6cb31 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -376,6 +376,14 @@ impl EnvVars { #[attr_hidden] pub const UV_INTERNAL__SHOW_DERIVATION_TREE: &'static str = "UV_INTERNAL__SHOW_DERIVATION_TREE"; + /// Used to set a temporary directory for some tests. + #[attr_hidden] + pub const UV_INTERNAL__TEST_DIR: &'static str = "UV_INTERNAL__TEST_DIR"; + + /// Used to force treating an interpreter as "managed" during tests. 
+ #[attr_hidden] + pub const UV_INTERNAL__TEST_PYTHON_MANAGED: &'static str = "UV_INTERNAL__TEST_PYTHON_MANAGED"; + /// Path to system-level configuration directory on Unix systems. pub const XDG_CONFIG_DIRS: &'static str = "XDG_CONFIG_DIRS"; diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 23655c1ca..cce02a70b 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -30,8 +30,8 @@ use uv_pep508::MarkerTreeContents; use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts}; use uv_python::{ EnvironmentPreference, Interpreter, InvalidEnvironmentKind, PythonDownloads, PythonEnvironment, - PythonInstallation, PythonPreference, PythonRequest, PythonVariant, PythonVersionFile, - VersionFileDiscoveryOptions, VersionRequest, + PythonInstallation, PythonPreference, PythonRequest, PythonSource, PythonVariant, + PythonVersionFile, VersionFileDiscoveryOptions, VersionRequest, satisfies_python_preference, }; use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements}; use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification}; @@ -664,6 +664,7 @@ impl ScriptInterpreter { &venv, EnvironmentKind::Script, python_request.as_ref(), + python_preference, requires_python .as_ref() .map(|(requires_python, _)| requires_python), @@ -794,6 +795,9 @@ pub(crate) enum EnvironmentIncompatibilityError { "The interpreter in the {0} environment has a different version ({1}) than it was created with ({2})" )] PyenvVersionConflict(EnvironmentKind, Version, Version), + + #[error("The {0} environment's Python interpreter does not meet the Python preference: `{1}`")] + PythonPreference(EnvironmentKind, PythonPreference), } /// Whether an environment is usable for a project or script, i.e., if it matches the requirements. 
@@ -801,6 +805,7 @@ fn environment_is_usable( environment: &PythonEnvironment, kind: EnvironmentKind, python_request: Option<&PythonRequest>, + python_preference: PythonPreference, requires_python: Option<&RequiresPython>, cache: &Cache, ) -> Result<(), EnvironmentIncompatibilityError> { @@ -836,6 +841,22 @@ fn environment_is_usable( } } + if satisfies_python_preference( + PythonSource::DiscoveredEnvironment, + environment.interpreter(), + python_preference, + ) { + trace!( + "The virtual environment's Python interpreter meets the Python preference: `{}`", + python_preference + ); + } else { + return Err(EnvironmentIncompatibilityError::PythonPreference( + kind, + python_preference, + )); + } + Ok(()) } @@ -889,6 +910,7 @@ impl ProjectInterpreter { &venv, EnvironmentKind::Project, python_request.as_ref(), + python_preference, requires_python.as_ref(), cache, ) { diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index 9d3c1428f..08eeec3aa 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -187,6 +187,18 @@ impl TestContext { "virtual environments, managed installations, search path, or registry".to_string(), "[PYTHON SOURCES]".to_string(), )); + self.filters.push(( + "virtual environments, search path, or registry".to_string(), + "[PYTHON SOURCES]".to_string(), + )); + self.filters.push(( + "virtual environments, registry, or search path".to_string(), + "[PYTHON SOURCES]".to_string(), + )); + self.filters.push(( + "virtual environments or search path".to_string(), + "[PYTHON SOURCES]".to_string(), + )); self.filters.push(( "managed installations or search path".to_string(), "[PYTHON SOURCES]".to_string(), @@ -415,6 +427,15 @@ impl TestContext { self } + pub fn with_versions_as_managed(mut self, versions: &[&str]) -> Self { + self.extra_env.push(( + EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED.into(), + versions.iter().join(" ").into(), + )); + + self + } + /// Clear filters on `TestContext`. 
pub fn clear_filters(mut self) -> Self { self.filters.clear(); diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index 9cd394bbd..2a7b0f404 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -11684,3 +11684,58 @@ fn strip_shebang_arguments() -> Result<()> { Ok(()) } + +#[test] +fn install_python_preference() { + let context = + TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]); + + // Create a managed interpreter environment + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + // Install a package, requesting managed Python + uv_snapshot!(context.filters(), context.pip_install().arg("anyio").arg("--managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "); + + // Install a package, requesting unmanaged Python + // This is allowed, because the virtual environment already exists + uv_snapshot!(context.filters(), context.pip_install().arg("anyio").arg("--no-managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "); + + // This also works with `VIRTUAL_ENV` unset + uv_snapshot!(context.filters(), context.pip_install() + .arg("anyio").arg("--no-managed-python").env_remove("VIRTUAL_ENV"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "); +} diff --git a/crates/uv/tests/it/python_find.rs b/crates/uv/tests/it/python_find.rs index 49e60c068..41eceeb92 100644 --- a/crates/uv/tests/it/python_find.rs +++ 
b/crates/uv/tests/it/python_find.rs @@ -728,6 +728,57 @@ fn python_find_venv_invalid() { "###); } +#[test] +fn python_find_managed() { + let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]) + .with_filtered_python_sources() + .with_versions_as_managed(&["3.12"]); + + // We find the managed interpreter + uv_snapshot!(context.filters(), context.python_find().arg("--managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + [PYTHON-3.12] + + ----- stderr ----- + "); + + // Request an interpreter that cannot be satisfied + uv_snapshot!(context.filters(), context.python_find().arg("--managed-python").arg("3.11"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: No interpreter found for Python 3.11 in virtual environments or managed installations + "); + + let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]) + .with_filtered_python_sources() + .with_versions_as_managed(&["3.11"]); + + // We find the unmanaged interpreter + uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + [PYTHON-3.12] + + ----- stderr ----- + "); + + // Request an interpreter that cannot be satisfied + uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python").arg("3.11"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: No interpreter found for Python 3.11 in [PYTHON SOURCES] + "); +} + /// See: /// /// This test will not succeed on macOS if using a Homebrew provided interpreter. 
The interpreter diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index 6a1eb6093..ad8672788 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -5500,3 +5500,49 @@ fn run_no_sync_incompatible_python() -> Result<()> { Ok(()) } + +#[test] +fn run_python_preference_no_project() { + let context = + TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]); + + context.venv().assert().success(); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.[X] + + ----- stderr ----- + "); + + uv_snapshot!(context.filters(), context.run().arg("--managed-python").arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.[X] + + ----- stderr ----- + "); + + // `VIRTUAL_ENV` is set here, so we'll ignore the flag + uv_snapshot!(context.filters(), context.run().arg("--no-managed-python").arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.[X] + + ----- stderr ----- + "); + + // If we remove the `VIRTUAL_ENV` variable, we should get the unmanaged Python + uv_snapshot!(context.filters(), context.run().arg("--no-managed-python").arg("python").arg("--version").env_remove("VIRTUAL_ENV"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.11.[X] + + ----- stderr ----- + "); +} diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 35a06ea57..3544f1961 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -10804,3 +10804,144 @@ fn undeclared_editable() -> Result<()> { Ok(()) } + +#[test] +fn sync_python_preference() -> Result<()> { + let context = TestContext::new_with_versions(&["3.12", "3.11"]); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.11" + 
dependencies = [] + "#, + )?; + + // Run an initial sync, with 3.12 as an "unmanaged" interpreter + context.sync().assert().success(); + + // Mark 3.12 as a managed interpreter for the rest of the tests + let context = context.with_versions_as_managed(&["3.12"]); + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // We should invalidate the environment and switch to 3.11 + uv_snapshot!(context.filters(), context.sync().arg("--no-managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // We will use the environment if it exists + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // Unless the user requests a Python preference that is incompatible + uv_snapshot!(context.filters(), context.sync().arg("--managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // If a interpreter cannot be found, we'll fail + uv_snapshot!(context.filters(), context.sync().arg("--managed-python").arg("-p").arg("3.11"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: No interpreter found for Python 3.11 in managed installations + + hint: A managed Python download is available for Python 3.11, but Python downloads are set to 'never' + "); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + 
pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = [] + + [tool.uv] + python-preference = "only-system" + "#, + )?; + + // We'll respect a `python-preference` in the `pyproject.toml` file + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // But it can be overridden via the CLI + uv_snapshot!(context.filters(), context.sync().arg("--managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + // `uv run` will invalidate the environment too + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.11.[X] + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 1 package in [TIME] + Audited in [TIME] + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs index 2430e607d..120d7def2 100644 --- a/crates/uv/tests/it/venv.rs +++ b/crates/uv/tests/it/venv.rs @@ -1322,3 +1322,69 @@ fn create_venv_apostrophe() { let stdout = String::from_utf8_lossy(&output.stdout); assert_eq!(stdout.trim(), venv_dir.to_string_lossy()); } + +#[test] +fn venv_python_preference() { + let context = + TestContext::new_with_versions(&["3.12", "3.11"]).with_versions_as_managed(&["3.12"]); + + // Create a managed interpreter environment + uv_snapshot!(context.filters(), context.venv(), @r" + 
success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.venv().arg("--no-managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.venv().arg("--no-managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + warning: A virtual environment already exists at `.venv`. In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.venv().arg("--managed-python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + warning: A virtual environment already exists at `.venv`. 
In the future, uv will require `--clear` to replace it + Activate with: source .venv/[BIN]/activate + "); +} From ff30f14d50cfdda544397f910808eeec8b20f11b Mon Sep 17 00:00:00 2001 From: John Mumm Date: Wed, 16 Jul 2025 23:17:01 +0200 Subject: [PATCH 073/130] Build `path` sources without build systems by default (#14413) We currently treat path sources as virtual if they do not specify a build system, which is surprising behavior. This PR updates the behavior to treat path sources as packages unless the path source is explicitly marked as `package = false` or its own `tool.uv.package` is set to `false`. Closes #12015 --------- Co-authored-by: Zanie Blue --- .../uv-distribution/src/metadata/lowering.rs | 4 +- crates/uv-workspace/src/pyproject.rs | 15 ++-- crates/uv/tests/it/edit.rs | 8 +- crates/uv/tests/it/lock.rs | 33 +++---- crates/uv/tests/it/sync.rs | 85 +++++++++++++++++++ docs/concepts/projects/config.md | 5 +- docs/concepts/projects/dependencies.md | 70 ++++++++++----- 7 files changed, 172 insertions(+), 48 deletions(-) diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index 54782c083..c05ac4779 100644 --- a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -729,12 +729,14 @@ fn path_source( }) } else { // Determine whether the project is a package or virtual. + // If the `package` option is unset, check if `tool.uv.package` is set + // on the path source (otherwise, default to `true`). 
let is_package = package.unwrap_or_else(|| { let pyproject_path = install_path.join("pyproject.toml"); fs_err::read_to_string(&pyproject_path) .ok() .and_then(|contents| PyProjectToml::from_string(contents).ok()) - .map(|pyproject_toml| pyproject_toml.is_package()) + .and_then(|pyproject_toml| pyproject_toml.tool_uv_package()) .unwrap_or(true) }); diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index 124a62881..aa64c601e 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -83,12 +83,7 @@ impl PyProjectToml { /// non-package ("virtual") project. pub fn is_package(&self) -> bool { // If `tool.uv.package` is set, defer to that explicit setting. - if let Some(is_package) = self - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.package) - { + if let Some(is_package) = self.tool_uv_package() { return is_package; } @@ -96,6 +91,14 @@ impl PyProjectToml { self.build_system.is_some() } + /// Returns the value of `tool.uv.package` if set. + pub fn tool_uv_package(&self) -> Option { + self.tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.package) + } + /// Returns `true` if the project uses a dynamic version. 
pub fn is_dynamic(&self) -> bool { self.project diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index ccc0cabf2..70b8d6e50 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -13381,7 +13381,9 @@ fn add_path_with_no_workspace() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] - Audited in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dep==0.1.0 (from file://[TEMP_DIR]/dep) "); let pyproject_toml = context.read("pyproject.toml"); @@ -13452,7 +13454,9 @@ fn add_path_outside_workspace_no_default() -> Result<()> { Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv Resolved 2 packages in [TIME] - Audited in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dep==0.1.0 (from file://[TEMP_DIR]/external_dep) "); let pyproject_toml = fs_err::read_to_string(workspace_toml)?; diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index faf37a83a..75d81b4c0 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -7205,12 +7205,12 @@ fn lock_exclusion() -> Result<()> { ] [package.metadata] - requires-dist = [{ name = "project", virtual = "../" }] + requires-dist = [{ name = "project", directory = "../" }] [[package]] name = "project" version = "0.1.0" - source = { virtual = "../" } + source = { directory = "../" } "# ); }); @@ -7793,7 +7793,7 @@ fn lock_dev_transitive() -> Result<()> { [package.metadata] requires-dist = [ { name = "baz", editable = "baz" }, - { name = "foo", virtual = "../foo" }, + { name = "foo", directory = "../foo" }, { name = "iniconfig", specifier = ">1" }, ] @@ -7815,7 +7815,7 @@ fn lock_dev_transitive() -> Result<()> { [[package]] name = "foo" version = "0.1.0" - source = { virtual = "../foo" } + source = { directory = "../foo" } [package.metadata] @@ -13651,7 +13651,7 @@ fn lock_narrowed_python_version_upper() -> Result<()> { [[package]] name = "dependency" version = 
"0.1.0" - source = { virtual = "dependency" } + source = { directory = "dependency" } dependencies = [ { name = "iniconfig", marker = "python_full_version >= '3.10'" }, ] @@ -13677,7 +13677,7 @@ fn lock_narrowed_python_version_upper() -> Result<()> { ] [package.metadata] - requires-dist = [{ name = "dependency", marker = "python_full_version >= '3.10'", virtual = "dependency" }] + requires-dist = [{ name = "dependency", marker = "python_full_version >= '3.10'", directory = "dependency" }] "# ); }); @@ -17173,10 +17173,10 @@ fn lock_implicit_virtual_project() -> Result<()> { Ok(()) } -/// Lock a project that has a path dependency that is implicitly virtual (by way of omitting -/// `build-system`). +/// Lock a project that has a path dependency that is implicitly non-virtual (despite +/// omitting `build-system`). #[test] -fn lock_implicit_virtual_path() -> Result<()> { +fn lock_implicit_package_path() -> Result<()> { let context = TestContext::new("3.12"); let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -17243,7 +17243,7 @@ fn lock_implicit_virtual_path() -> Result<()> { [[package]] name = "child" version = "0.1.0" - source = { virtual = "child" } + source = { directory = "child" } dependencies = [ { name = "iniconfig" }, ] @@ -17281,7 +17281,7 @@ fn lock_implicit_virtual_path() -> Result<()> { [package.metadata] requires-dist = [ { name = "anyio", specifier = ">3" }, - { name = "child", virtual = "child" }, + { name = "child", directory = "child" }, ] [[package]] @@ -17317,20 +17317,21 @@ fn lock_implicit_virtual_path() -> Result<()> { Resolved 6 packages in [TIME] "###); - // Install from the lockfile. The virtual project should _not_ be installed. - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + // Install from the lockfile. The path dependency should be installed. 
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Prepared 4 packages in [TIME] - Installed 4 packages in [TIME] + Prepared 5 packages in [TIME] + Installed 5 packages in [TIME] + anyio==4.3.0 + + child==0.1.0 (from file://[TEMP_DIR]/child) + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 - "###); + "); Ok(()) } diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 3544f1961..bb3546e22 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -5939,6 +5939,91 @@ fn sync_override_package() -> Result<()> { ~ project==0.0.0 (from file://[TEMP_DIR]/) "); + // Update the source `tool.uv` to `package = true` + let pyproject_toml = context.temp_dir.child("core").child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "core" + version = "0.1.0" + requires-python = ">=3.12" + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + + [tool.uv] + package = true + "#, + )?; + + // Mark the source as `package = false`. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.0.0" + requires-python = ">=3.12" + dependencies = ["core"] + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + + [tool.uv.sources] + core = { path = "./core", package = false } + "#, + )?; + + // Syncing the project should _not_ install `core`. + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Uninstalled 1 package in [TIME] + Installed 1 package in [TIME] + ~ project==0.0.0 (from file://[TEMP_DIR]/) + "); + + // Remove the `package = false` mark. 
+ let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.0.0" + requires-python = ">=3.12" + dependencies = ["core"] + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + + [tool.uv.sources] + core = { path = "./core" } + "#, + )?; + + // Syncing the project _should_ install `core`. + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Uninstalled 1 package in [TIME] + Installed 2 packages in [TIME] + + core==0.1.0 (from file://[TEMP_DIR]/core) + ~ project==0.0.0 (from file://[TEMP_DIR]/) + "); + Ok(()) } diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md index 8efb667a1..34b62c01a 100644 --- a/docs/concepts/projects/config.md +++ b/docs/concepts/projects/config.md @@ -116,8 +116,9 @@ with the default build system. the presence of a `[build-system]` table is not required in other packages. For legacy reasons, if a build system is not defined, then `setuptools.build_meta:__legacy__` is used to build the package. Packages you depend on may not explicitly declare their build system but are still - installable. Similarly, if you add a dependency on a local package or install it with `uv pip`, - uv will always attempt to build and install it. + installable. Similarly, if you [add a dependency on a local project](./dependencies.md#path) + or install it with `uv pip`, uv will attempt to build and install it regardless of the presence + of a `[build-system]` table. ### Build system options diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md index 022db4d7e..bf11e7174 100644 --- a/docs/concepts/projects/dependencies.md +++ b/docs/concepts/projects/dependencies.md @@ -410,33 +410,28 @@ $ uv add ~/projects/bar/ !!! 
important - An [editable installation](#editable-dependencies) is not used for path dependencies by - default. An editable installation may be requested for project directories: + When using a directory as a path dependency, uv will attempt to build and install the target as + a package by default. See the [virtual dependency](#virtual-dependencies) documentation for + details. - ```console - $ uv add --editable ../projects/bar/ - ``` +An [editable installation](#editable-dependencies) is not used for path dependencies by default. An +editable installation may be requested for project directories: - Which will result in a `pyproject.toml` with: +```console +$ uv add --editable ../projects/bar/ +``` - ```toml title="pyproject.toml" - [project] - dependencies = ["bar"] +Which will result in a `pyproject.toml` with: - [tool.uv.sources] - bar = { path = "../projects/bar", editable = true } - ``` +```toml title="pyproject.toml" +[project] +dependencies = ["bar"] - Similarly, if a project is marked as a [non-package](./config.md#build-systems), but you'd - like to install it in the environment as a package, set `package = true` on the source: +[tool.uv.sources] +bar = { path = "../projects/bar", editable = true } +``` - ```toml title="pyproject.toml" - [project] - dependencies = ["bar"] - - [tool.uv.sources] - bar = { path = "../projects/bar", package = true } - ``` +!!! tip For multiple packages in the same repository, [_workspaces_](./workspaces.md) may be a better fit. @@ -808,6 +803,39 @@ Or, to opt-out of using an editable dependency in a workspace: $ uv add --no-editable ./path/foo ``` +## Virtual dependencies + +uv allows dependencies to be "virtual", in which the dependency itself is not installed as a +[package](./config.md#project-packaging), but its dependencies are. + +By default, only workspace members without build systems declared are virtual. 
+ +A dependency with a [`path` source](#path) is not virtual unless it explicitly sets +[`tool.uv.package = false`](../../reference/settings.md#package). Unlike working _in_ the dependent +project with uv, the package will be built even if a [build system](./config.md#build-systems) is +not declared. + +To treat a dependency as virtual, set `package = false` on the source: + +```toml title="pyproject.toml" +[project] +dependencies = ["bar"] + +[tool.uv.sources] +bar = { path = "../projects/bar", package = false } +``` + +Similarly, if a dependency sets `tool.uv.package = false`, it can be overridden by declaring +`package = true` on the source: + +```toml title="pyproject.toml" +[project] +dependencies = ["bar"] + +[tool.uv.sources] +bar = { path = "../projects/bar", package = true } +``` + ## Dependency specifiers uv uses standard From 0077f2357f4e016c871b2b651ca59a139a95f19a Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 11:09:13 -0500 Subject: [PATCH 074/130] Stabilize addition of Python executables to the bin (#14626) Closes https://github.com/astral-sh/uv/issues/14296 As mentioned in #14681, this does not stabilize the `--default` behavior. --- crates/uv-cli/src/lib.rs | 7 +- crates/uv/src/commands/python/install.rs | 62 ++- crates/uv/tests/it/common/mod.rs | 25 +- crates/uv/tests/it/help.rs | 7 +- crates/uv/tests/it/python_install.rs | 555 +++++++++++++++++++++-- docs/concepts/python-versions.md | 25 +- docs/guides/install-python.md | 23 +- docs/reference/cli.md | 2 +- 8 files changed, 593 insertions(+), 113 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 5df818654..9d7cfa6e0 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -4810,10 +4810,9 @@ pub enum PythonCommand { /// Python versions are installed into the uv Python directory, which can be retrieved with `uv /// python dir`. 
/// - /// A `python` executable is not made globally available, managed Python versions are only used - /// in uv commands or in active virtual environments. There is experimental support for adding - /// Python executables to a directory on the path — use the `--preview` flag to enable this - /// behavior and `uv python dir --bin` to retrieve the target directory. + /// By default, Python executables are added to a directory on the path with a minor version + /// suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use + /// `uv python dir --bin` to see the target directory. /// /// Multiple Python versions may be requested. /// diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index b9d4660df..37d6a6777 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -166,12 +166,14 @@ pub(crate) async fn install( ) -> Result { let start = std::time::Instant::now(); + // TODO(zanieb): We should consider marking the Python installation as the default when + // `--default` is used. It's not clear how this overlaps with a global Python pin, but I'd be + // surprised if `uv python find` returned the "newest" Python version rather than the one I just + // installed with the `--default` flag. if default && !preview.is_enabled() { - writeln!( - printer.stderr(), - "The `--default` flag is only available in preview mode; add the `--preview` flag to use `--default`" - )?; - return Ok(ExitStatus::Failure); + warn_user!( + "The `--default` option is experimental and may change without warning. 
Pass `--preview` to disable this warning" + ); } if upgrade && preview.is_disabled() { @@ -222,6 +224,8 @@ pub(crate) async fn install( .map(PythonVersionFile::into_versions) .unwrap_or_else(|| { // If no version file is found and no requests were made + // TODO(zanieb): We should consider differentiating between a global Python version + // file here, allowing a request from there to enable `is_default_install`. is_default_install = true; vec![if reinstall { // On bare `--reinstall`, reinstall all Python versions @@ -451,10 +455,10 @@ pub(crate) async fn install( } } - let bin_dir = if matches!(bin, Some(true)) || preview.is_enabled() { - Some(python_executable_dir()?) - } else { + let bin_dir = if matches!(bin, Some(false)) { None + } else { + Some(python_executable_dir()?) }; let installations: Vec<_> = downloaded.iter().chain(satisfied.iter().copied()).collect(); @@ -469,20 +473,10 @@ pub(crate) async fn install( e.warn_user(installation); } - if preview.is_disabled() { - debug!("Skipping installation of Python executables, use `--preview` to enable."); - continue; - } - - let bin_dir = bin_dir - .as_ref() - .expect("We should have a bin directory with preview enabled") - .as_path(); - let upgradeable = (default || is_default_install) || requested_minor_versions.contains(&installation.key().version().python_version()); - if !matches!(bin, Some(false)) { + if let Some(bin_dir) = bin_dir.as_ref() { create_bin_links( installation, bin_dir, @@ -661,11 +655,7 @@ pub(crate) async fn install( } } - if preview.is_enabled() && !matches!(bin, Some(false)) { - let bin_dir = bin_dir - .as_ref() - .expect("We should have a bin directory with preview enabled") - .as_path(); + if let Some(bin_dir) = bin_dir.as_ref() { warn_if_not_on_path(bin_dir); } } @@ -749,16 +739,20 @@ fn create_bin_links( errors: &mut Vec<(InstallErrorKind, PythonInstallationKey, Error)>, preview: PreviewMode, ) { - let targets = - if (default || is_default_install) && 
first_request.matches_installation(installation) { - vec![ - installation.key().executable_name_minor(), - installation.key().executable_name_major(), - installation.key().executable_name(), - ] - } else { - vec![installation.key().executable_name_minor()] - }; + // TODO(zanieb): We want more feedback on the `is_default_install` behavior before stabilizing + // it. In particular, it may be confusing because it does not apply when versions are loaded + // from a `.python-version` file. + let targets = if (default || (is_default_install && preview.is_enabled())) + && first_request.matches_installation(installation) + { + vec![ + installation.key().executable_name_minor(), + installation.key().executable_name_major(), + installation.key().executable_name(), + ] + } else { + vec![installation.key().executable_name_minor()] + }; for target in targets { let target = bin.join(target); diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index 08eeec3aa..ab4c38247 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -220,17 +220,30 @@ impl TestContext { /// and `.exe` suffixes. 
#[must_use] pub fn with_filtered_python_names(mut self) -> Self { + use env::consts::EXE_SUFFIX; + let exe_suffix = regex::escape(EXE_SUFFIX); + + self.filters.push(( + format!(r"python\d.\d\d{exe_suffix}"), + "[PYTHON]".to_string(), + )); + self.filters + .push((format!(r"python\d{exe_suffix}"), "[PYTHON]".to_string())); + if cfg!(windows) { + // On Windows, we want to filter out all `python.exe` instances self.filters - .push((r"python\.exe".to_string(), "[PYTHON]".to_string())); + .push((format!(r"python{exe_suffix}"), "[PYTHON]".to_string())); + // Including ones where we'd already stripped the `.exe` in another filter + self.filters + .push((r"[\\/]python".to_string(), "/[PYTHON]".to_string())); } else { + // On Unix, it's a little trickier — we don't want to clobber use of `python` in the + // middle of something else, e.g., `cpython`. For this reason, we require a leading `/`. self.filters - .push((r"python\d.\d\d".to_string(), "[PYTHON]".to_string())); - self.filters - .push((r"python\d".to_string(), "[PYTHON]".to_string())); - self.filters - .push((r"/python".to_string(), "/[PYTHON]".to_string())); + .push((format!(r"/python{exe_suffix}"), "/[PYTHON]".to_string())); } + self } diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index d9353f7c3..d4f46b0cb 100644 --- a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -469,10 +469,9 @@ fn help_subsubcommand() { Python versions are installed into the uv Python directory, which can be retrieved with `uv python dir`. - A `python` executable is not made globally available, managed Python versions are only used in uv - commands or in active virtual environments. There is experimental support for adding Python - executables to a directory on the path — use the `--preview` flag to enable this behavior and `uv - python dir --bin` to retrieve the target directory. + By default, Python executables are added to a directory on the path with a minor version suffix, + e.g., `python3.13`. 
To install `python3` and `python`, use the `--default` flag. Use `uv python dir + --bin` to see the target directory. Multiple Python versions may be requested. diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index 50b0b3cf5..51e394aad 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -30,15 +30,49 @@ fn python_install() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-[PLATFORM] + + cpython-3.13.5-[PLATFORM] (python3.13) "); let bin_python = context .bin_dir .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX)); - // The executable should not be installed in the bin directory (requires preview) - bin_python.assert(predicate::path::missing()); + // The executable should be installed in the bin directory + bin_python.assert(predicate::path::exists()); + + // On Unix, it should be a link + #[cfg(unix)] + bin_python.assert(predicate::path::is_symlink()); + + // The link should be a path to the binary + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + } + + // The executable should "work" + uv_snapshot!(context.filters(), Command::new(bin_python.as_os_str()) + .arg("-c").arg("import subprocess; print('hello world')"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + hello world + + ----- stderr ----- + "###); // Should be a no-op when already installed uv_snapshot!(context.filters(), context.python_install(), @r###" @@ -67,9 +101,12 @@ fn python_install() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - ~ cpython-3.13.5-[PLATFORM] + ~ cpython-3.13.5-[PLATFORM] 
(python3.13) "); + // The executable should still be present in the bin directory + bin_python.assert(predicate::path::exists()); + // Uninstallation requires an argument uv_snapshot!(context.filters(), context.python_uninstall(), @r###" success: false @@ -93,8 +130,11 @@ fn python_install() { ----- stderr ----- Searching for Python versions matching: Python 3.13 Uninstalled Python 3.13.5 in [TIME] - - cpython-3.13.5-[PLATFORM] + - cpython-3.13.5-[PLATFORM] (python3.13) "); + + // The executable should be removed + bin_python.assert(predicate::path::missing()); } #[test] @@ -112,8 +152,8 @@ fn python_reinstall() { ----- stderr ----- Installed 2 versions in [TIME] - + cpython-3.12.11-[PLATFORM] - + cpython-3.13.5-[PLATFORM] + + cpython-3.12.11-[PLATFORM] (python3.12) + + cpython-3.13.5-[PLATFORM] (python3.13) "); // Reinstall a single version @@ -124,7 +164,7 @@ fn python_reinstall() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - ~ cpython-3.13.5-[PLATFORM] + ~ cpython-3.13.5-[PLATFORM] (python3.13) "); // Reinstall multiple versions @@ -135,8 +175,8 @@ fn python_reinstall() { ----- stderr ----- Installed 2 versions in [TIME] - ~ cpython-3.12.11-[PLATFORM] - ~ cpython-3.13.5-[PLATFORM] + ~ cpython-3.12.11-[PLATFORM] (python3.12) + ~ cpython-3.13.5-[PLATFORM] (python3.13) "); // Reinstalling a version that is not installed should also work @@ -147,7 +187,7 @@ fn python_reinstall() { ----- stderr ----- Installed Python 3.11.13 in [TIME] - + cpython-3.11.13-[PLATFORM] + + cpython-3.11.13-[PLATFORM] (python3.11) "); } @@ -167,7 +207,7 @@ fn python_reinstall_patch() { ----- stderr ----- Installed 2 versions in [TIME] + cpython-3.12.6-[PLATFORM] - + cpython-3.12.7-[PLATFORM] + + cpython-3.12.7-[PLATFORM] (python3.12) "); // Reinstall all "3.12" versions @@ -180,7 +220,7 @@ fn python_reinstall_patch() { ----- stderr ----- Installed Python 3.12.11 in [TIME] - + cpython-3.12.11-[PLATFORM] + + cpython-3.12.11-[PLATFORM] (python3.12) "); } @@ -328,6 +368,208 @@ fn 
regression_cpython() { "###); } +#[test] +fn python_install_force() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install the latest version + uv_snapshot!(context.filters(), context.python_install(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python3.13) + "); + + let bin_python = context + .bin_dir + .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX)); + + // You can force replacement of the executables + uv_snapshot!(context.filters(), context.python_install().arg("--force"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python3.13) + "); + + // The executable should still be present in the bin directory + bin_python.assert(predicate::path::exists()); + + // If an unmanaged executable is present, `--force` is required + fs_err::remove_file(bin_python.path()).unwrap(); + bin_python.touch().unwrap(); + + uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: Failed to install executable for cpython-3.13.5-[PLATFORM] + Caused by: Executable already exists at `[BIN]/python3.13` but is not managed by uv; use `--force` to replace it + "); + + uv_snapshot!(context.filters(), context.python_install().arg("--force").arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python3.13) + "); + + bin_python.assert(predicate::path::exists()); +} + +#[test] +fn python_install_minor() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // 
Install a minor version + uv_snapshot!(context.filters(), context.python_install().arg("3.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.11.13 in [TIME] + + cpython-3.11.13-[PLATFORM] (python3.11) + "); + + let bin_python = context + .bin_dir + .child(format!("python3.11{}", std::env::consts::EXE_SUFFIX)); + + // The executable should be installed in the bin directory + bin_python.assert(predicate::path::exists()); + + // It should be a link to the minor version + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.13-[PLATFORM]/bin/python3.11" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.13-[PLATFORM]/python" + ); + }); + } + + uv_snapshot!(context.filters(), context.python_uninstall().arg("3.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.11 + Uninstalled Python 3.11.13 in [TIME] + - cpython-3.11.13-[PLATFORM] (python3.11) + "); + + // The executable should be removed + bin_python.assert(predicate::path::missing()); +} + +#[test] +fn python_install_multiple_patch() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install multiple patch versions + uv_snapshot!(context.filters(), context.python_install().arg("3.12.8").arg("3.12.6"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed 2 versions in [TIME] + + cpython-3.12.6-[PLATFORM] + + cpython-3.12.8-[PLATFORM] (python3.12) + "); + + let bin_python = context + .bin_dir + .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX)); + + // The executable 
should be installed in the bin directory + bin_python.assert(predicate::path::exists()); + + // The link should resolve to the newer patch version + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python" + ); + }); + } + + uv_snapshot!(context.filters(), context.python_uninstall().arg("3.12.8"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.12.8 + Uninstalled Python 3.12.8 in [TIME] + - cpython-3.12.8-[PLATFORM] (python3.12) + "); + + // TODO(zanieb): This behavior is not implemented yet + // // The executable should be installed in the bin directory + // bin_python.assert(predicate::path::exists()); + + // // When the version is removed, the link should point to the other patch version + // if cfg!(unix) { + // insta::with_settings!({ + // filters => context.filters(), + // }, { + // insta::assert_snapshot!( + // canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12" + // ); + // }); + // } else if cfg!(windows) { + // insta::with_settings!({ + // filters => context.filters(), + // }, { + // insta::assert_snapshot!( + // canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python" + // ); + // }); + // } +} + #[test] fn python_install_preview() { let context: TestContext = TestContext::new_with_versions(&[]) @@ -853,7 +1095,7 @@ fn python_install_freethreaded() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-[PLATFORM] + + cpython-3.13.5-[PLATFORM] (python3.13) "); // Should not work with older Python 
versions @@ -875,7 +1117,7 @@ fn python_install_freethreaded() { Searching for Python installations Uninstalled 2 versions in [TIME] - cpython-3.13.5+freethreaded-[PLATFORM] (python3.13t) - - cpython-3.13.5-[PLATFORM] + - cpython-3.13.5-[PLATFORM] (python3.13) "); } @@ -936,15 +1178,243 @@ fn python_install_default() { .bin_dir .child(format!("python{}", std::env::consts::EXE_SUFFIX)); - // `--preview` is required for `--default` - uv_snapshot!(context.filters(), context.python_install().arg("--default"), @r###" - success: false - exit_code: 1 + // Install a specific version + uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r" + success: true + exit_code: 0 ----- stdout ----- ----- stderr ----- - The `--default` flag is only available in preview mode; add the `--preview` flag to use `--default` - "###); + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python3.13) + "); + + // Only the minor versioned executable should be installed + bin_python_minor_13.assert(predicate::path::exists()); + bin_python_major.assert(predicate::path::missing()); + bin_python_default.assert(predicate::path::missing()); + + // Install again, with `--default` + uv_snapshot!(context.filters(), context.python_install().arg("--default").arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: The `--default` option is experimental and may change without warning. 
Pass `--preview` to disable this warning + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python, python3) + "); + + // Now all the executables should be installed + bin_python_minor_13.assert(predicate::path::exists()); + bin_python_major.assert(predicate::path::exists()); + bin_python_default.assert(predicate::path::exists()); + + // Uninstall + uv_snapshot!(context.filters(), context.python_uninstall().arg("--all"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python installations + Uninstalled Python 3.13.5 in [TIME] + - cpython-3.13.5-[PLATFORM] (python, python3, python3.13) + "); + + // The executables should be removed + bin_python_minor_13.assert(predicate::path::missing()); + bin_python_major.assert(predicate::path::missing()); + bin_python_default.assert(predicate::path::missing()); + + // Install the latest version, i.e., a "default install" + uv_snapshot!(context.filters(), context.python_install().arg("--default"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: The `--default` option is experimental and may change without warning. 
Pass `--preview` to disable this warning + Installed Python 3.13.5 in [TIME] + + cpython-3.13.5-[PLATFORM] (python, python3, python3.13) + "); + + // Since it's a default install, we should include all of the executables + bin_python_minor_13.assert(predicate::path::exists()); + bin_python_major.assert(predicate::path::exists()); + bin_python_default.assert(predicate::path::exists()); + + // And 3.13 should be the default + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + } + + // Uninstall again + uv_snapshot!(context.filters(), context.python_uninstall().arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.13 + Uninstalled Python 3.13.5 in [TIME] + - cpython-3.13.5-[PLATFORM] (python, python3, python3.13) + "); + + // 
We should remove all the executables + bin_python_minor_13.assert(predicate::path::missing()); + bin_python_major.assert(predicate::path::missing()); + bin_python_default.assert(predicate::path::missing()); + + // Install multiple versions, with the `--default` flag + uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("3.13").arg("--default"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning + error: The `--default` flag cannot be used with multiple targets + "); + + // Install 3.12 as a new default + uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("--default"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: The `--default` option is experimental and may change without warning. Pass `--preview` to disable this warning + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python, python3, python3.12) + "); + + let bin_python_minor_12 = context + .bin_dir + .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX)); + + // All the executables should exist + bin_python_minor_12.assert(predicate::path::exists()); + bin_python_major.assert(predicate::path::exists()); + bin_python_default.assert(predicate::path::exists()); + + // And 3.12 should be the default + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), 
@"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + ); + }); + } else { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + ); + }); + } +} + +#[test] +fn python_install_default_preview() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + let bin_python_minor_13 = context + .bin_dir + .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX)); + + let bin_python_major = context + .bin_dir + .child(format!("python3{}", std::env::consts::EXE_SUFFIX)); + + let bin_python_default = context + .bin_dir + .child(format!("python{}", std::env::consts::EXE_SUFFIX)); // Install a specific version uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r" @@ -1342,7 +1812,7 @@ fn python_install_unknown() { #[cfg(unix)] #[test] -fn python_install_preview_broken_link() { +fn python_install_broken_link() { use assert_fs::prelude::PathCreateDir; use fs_err::os::unix::fs::symlink; @@ -1358,7 +1828,7 @@ fn python_install_preview_broken_link() { symlink(context.temp_dir.join("does-not-exist"), &bin_python).unwrap(); // Install - uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.13"), @r" + uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r" success: true exit_code: 0 ----- stdout ----- @@ -1393,7 +1863,7 @@ fn python_install_default_from_env() { ----- stderr 
----- Installed Python 3.12.11 in [TIME] - + cpython-3.12.11-[PLATFORM] + + cpython-3.12.11-[PLATFORM] (python3.12) "); // But prefer explicit requests @@ -1404,7 +1874,7 @@ fn python_install_default_from_env() { ----- stderr ----- Installed Python 3.11.13 in [TIME] - + cpython-3.11.13-[PLATFORM] + + cpython-3.11.13-[PLATFORM] (python3.11) "); // We should ignore `UV_PYTHON` here and complain there is not a target @@ -1431,8 +1901,8 @@ fn python_install_default_from_env() { ----- stderr ----- Searching for Python installations Uninstalled 2 versions in [TIME] - - cpython-3.11.13-[PLATFORM] - - cpython-3.12.11-[PLATFORM] + - cpython-3.11.13-[PLATFORM] (python3.11) + - cpython-3.12.11-[PLATFORM] (python3.12) "); // Uninstall with no targets should error @@ -1516,8 +1986,6 @@ fn python_install_314() { let context: TestContext = TestContext::new_with_versions(&[]) .with_filtered_python_keys() .with_managed_python_dirs() - .with_filtered_python_install_bin() - .with_filtered_python_names() .with_filtered_exe_suffix(); // Install 3.14 @@ -1529,7 +1997,7 @@ fn python_install_314() { ----- stderr ----- Installed Python 3.14.0b4 in [TIME] - + cpython-3.14.0b4-[PLATFORM] + + cpython-3.14.0b4-[PLATFORM] (python3.14) "); // Install a specific pre-release @@ -1543,6 +2011,17 @@ fn python_install_314() { + cpython-3.14.0a4-[PLATFORM] "); + // Add name filtering for the `find` tests, we avoid it in `install` tests because it clobbers + // the version suffixes which matter in the install logs + let filters = context + .filters() + .iter() + .map(|(a, b)| ((*a).to_string(), (*b).to_string())) + .collect::>(); + let context = context + .with_filtered_python_install_bin() + .with_filtered_python_names(); + // We should be able to find this version without opt-in, because there is no stable release // installed uv_snapshot!(context.filters(), context.python_find().arg("3.14"), @r" @@ -1574,14 +2053,14 @@ fn python_install_314() { "); // If we install a stable version, that should be 
preferred though - uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r" + uv_snapshot!(filters, context.python_install().arg("3.13"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-[PLATFORM] + + cpython-3.13.5-[PLATFORM] (python3.13) "); uv_snapshot!(context.filters(), context.python_find().arg("3"), @r" @@ -1621,15 +2100,15 @@ fn python_install_cached() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-[PLATFORM] + + cpython-3.13.5-[PLATFORM] (python3.13) "); let bin_python = context .bin_dir .child(format!("python3.13{}", std::env::consts::EXE_SUFFIX)); - // The executable should not be installed in the bin directory (requires preview) - bin_python.assert(predicate::path::missing()); + // The executable should be installed in the bin directory + bin_python.assert(predicate::path::exists()); // Should be a no-op when already installed uv_snapshot!(context.filters(), context @@ -1651,7 +2130,7 @@ fn python_install_cached() { ----- stderr ----- Searching for Python versions matching: Python 3.13 Uninstalled Python 3.13.5 in [TIME] - - cpython-3.13.5-[PLATFORM] + - cpython-3.13.5-[PLATFORM] (python3.13) "); // The cached archive can be installed offline @@ -1665,7 +2144,7 @@ fn python_install_cached() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-[PLATFORM] + + cpython-3.13.5-[PLATFORM] (python3.13) "); // 3.12 isn't cached, so it can't be installed @@ -1714,7 +2193,7 @@ fn python_install_emulated_macos() { ----- stderr ----- Installed Python 3.13.5 in [TIME] - + cpython-3.13.5-macos-x86_64-none + + cpython-3.13.5-macos-x86_64-none (python3.13) "); // It should be discoverable with `uv python find` diff --git a/docs/concepts/python-versions.md b/docs/concepts/python-versions.md index ee18fa9da..0c16218d4 100644 --- a/docs/concepts/python-versions.md +++ b/docs/concepts/python-versions.md @@ -121,28 +121,17 @@ present, uv 
will install all the Python versions listed in the file. ### Installing Python executables -!!! important - - Support for installing Python executables is in _preview_. This means the behavior is experimental - and subject to change. - -To install Python executables into your `PATH`, provide the `--preview` option: - -```console -$ uv python install 3.12 --preview -``` - -This will install a Python executable for the requested version into `~/.local/bin`, e.g., as -`python3.12`. +uv installs Python executables into your `PATH` by default, e.g., `uv python install 3.12` will +install a Python executable into `~/.local/bin`, e.g., as `python3.12`. !!! tip If `~/.local/bin` is not in your `PATH`, you can add it with `uv tool update-shell`. -To install `python` and `python3` executables, include the `--default` option: +To install `python` and `python3` executables, include the experimental `--default` option: ```console -$ uv python install 3.12 --default --preview +$ uv python install 3.12 --default ``` When installing Python executables, uv will only overwrite an existing executable if it is managed @@ -153,9 +142,9 @@ uv will update executables that it manages. However, it will prefer the latest p Python minor version by default. 
For example: ```console -$ uv python install 3.12.7 --preview # Adds `python3.12` to `~/.local/bin` -$ uv python install 3.12.6 --preview # Does not update `python3.12` -$ uv python install 3.12.8 --preview # Updates `python3.12` to point to 3.12.8 +$ uv python install 3.12.7 # Adds `python3.12` to `~/.local/bin` +$ uv python install 3.12.6 # Does not update `python3.12` +$ uv python install 3.12.8 # Updates `python3.12` to point to 3.12.8 ``` ## Upgrading Python versions diff --git a/docs/guides/install-python.md b/docs/guides/install-python.md index da841eac6..374ab29fd 100644 --- a/docs/guides/install-python.md +++ b/docs/guides/install-python.md @@ -24,17 +24,24 @@ $ uv python install Python does not publish official distributable binaries. As such, uv uses distributions from the Astral [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone) project. See the [Python distributions](../concepts/python-versions.md#managed-python-distributions) documentation for more details. -Once Python is installed, it will be used by `uv` commands automatically. +Once Python is installed, it will be used by `uv` commands automatically. uv also adds the installed +version to your `PATH`: -!!! important +```console +$ python3.13 +``` - When Python is installed by uv, it will not be available globally (i.e. via the `python` command). - Support for this feature is in _preview_. See [Installing Python executables](../concepts/python-versions.md#installing-python-executables) - for details. +uv only installs a _versioned_ executable by default. To install `python` and `python3` executables, +include the experimental `--default` option: - You can still use - [`uv run`](../guides/scripts.md#using-different-python-versions) or - [create and activate a virtual environment](../pip/environments.md) to use `python` directly. +```console +$ uv python install --default +``` + +!!! 
tip + + See the documentation on [installing Python executables](../concepts/python-versions.md#installing-python-executables) + for more details. ## Installing a specific version diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 9be647449..4fc832cdb 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -2739,7 +2739,7 @@ Supports CPython and PyPy. CPython distributions are downloaded from the Astral Python versions are installed into the uv Python directory, which can be retrieved with `uv python dir`. -A `python` executable is not made globally available, managed Python versions are only used in uv commands or in active virtual environments. There is experimental support for adding Python executables to a directory on the path — use the `--preview` flag to enable this behavior and `uv python dir --bin` to retrieve the target directory. +By default, Python executables are added to a directory on the path with a minor version suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use `uv python dir --bin` to see the target directory. Multiple Python versions may be requested. 
From cd40a3452295a8d4b6af69206c43282096507c89 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 13:38:02 -0500 Subject: [PATCH 075/130] Build and install workspace members that are dependencies by default (#14663) Regardless of the presence of a build system, as in https://github.com/astral-sh/uv/pull/14413 --------- Co-authored-by: John Mumm --- .../uv-distribution/src/metadata/lowering.rs | 8 +- crates/uv-platform-tags/src/tags.rs | 2 +- crates/uv-resolver/src/lock/mod.rs | 6 +- crates/uv-workspace/src/pyproject.rs | 8 +- crates/uv-workspace/src/workspace.rs | 125 ++++- crates/uv/src/commands/build_frontend.rs | 4 +- crates/uv/src/commands/project/lock.rs | 4 + crates/uv/src/commands/project/lock_target.rs | 14 +- crates/uv/src/commands/project/sync.rs | 2 +- crates/uv/tests/it/edit.rs | 20 +- crates/uv/tests/it/lock.rs | 459 +++++++++++++++++- crates/uv/tests/it/lock_conflict.rs | 40 +- crates/uv/tests/it/pip_compile.rs | 12 +- crates/uv/tests/it/sync.rs | 100 +++- docs/concepts/projects/dependencies.md | 54 ++- 15 files changed, 791 insertions(+), 67 deletions(-) diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index c05ac4779..a8e899bb4 100644 --- a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -306,7 +306,10 @@ impl LoweredRequirement { }, url, } - } else if member.pyproject_toml().is_package() { + } else if member + .pyproject_toml() + .is_package(!workspace.is_required_member(&requirement.name)) + { RequirementSource::Directory { install_path: install_path.into_boxed_path(), url, @@ -736,7 +739,8 @@ fn path_source( fs_err::read_to_string(&pyproject_path) .ok() .and_then(|contents| PyProjectToml::from_string(contents).ok()) - .and_then(|pyproject_toml| pyproject_toml.tool_uv_package()) + // We don't require a build system for path dependencies + .map(|pyproject_toml| pyproject_toml.is_package(false)) .unwrap_or(true) }); 
diff --git a/crates/uv-platform-tags/src/tags.rs b/crates/uv-platform-tags/src/tags.rs index 7381f5dd5..f2c6d6cbb 100644 --- a/crates/uv-platform-tags/src/tags.rs +++ b/crates/uv-platform-tags/src/tags.rs @@ -771,7 +771,7 @@ mod tests { /// A reference list can be generated with: /// ```text /// $ python -c "from packaging import tags; [print(tag) for tag in tags.platform_tags()]"` - /// ```` + /// ``` #[test] fn test_platform_tags_manylinux() { let tags = compatible_tags(&Platform::new( diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 7cbac67df..49cb851b3 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -1255,6 +1255,7 @@ impl Lock { root: &Path, packages: &BTreeMap, members: &[PackageName], + required_members: &BTreeSet, requirements: &[Requirement], constraints: &[Requirement], overrides: &[Requirement], @@ -1282,7 +1283,10 @@ impl Lock { // Validate that the member sources have not changed (e.g., that they've switched from // virtual to non-virtual or vice versa). for (name, member) in packages { - let expected = !member.pyproject_toml().is_package(); + // We don't require a build system, if the workspace member is a dependency + let expected = !member + .pyproject_toml() + .is_package(!required_members.contains(name)); let actual = self .find_by_name(name) .ok() diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index aa64c601e..4a994b801 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -66,7 +66,7 @@ pub struct PyProjectToml { /// Used to determine whether a `build-system` section is present. #[serde(default, skip_serializing)] - build_system: Option, + pub build_system: Option, } impl PyProjectToml { @@ -81,18 +81,18 @@ impl PyProjectToml { /// Returns `true` if the project should be considered a Python package, as opposed to a /// non-package ("virtual") project. 
- pub fn is_package(&self) -> bool { + pub fn is_package(&self, require_build_system: bool) -> bool { // If `tool.uv.package` is set, defer to that explicit setting. if let Some(is_package) = self.tool_uv_package() { return is_package; } // Otherwise, a project is assumed to be a package if `build-system` is present. - self.build_system.is_some() + self.build_system.is_some() || !require_build_system } /// Returns the value of `tool.uv.package` if set. - pub fn tool_uv_package(&self) -> Option { + fn tool_uv_package(&self) -> Option { self.tool .as_ref() .and_then(|tool| tool.uv.as_ref()) diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index 8d09554d9..09f2b692a 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -20,7 +20,7 @@ use uv_warnings::warn_user_once; use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroup, FlatDependencyGroups}; use crate::pyproject::{ - Project, PyProjectToml, PyprojectTomlError, Sources, ToolUvSources, ToolUvWorkspace, + Project, PyProjectToml, PyprojectTomlError, Source, Sources, ToolUvSources, ToolUvWorkspace, }; type WorkspaceMembers = Arc>; @@ -109,6 +109,8 @@ pub struct Workspace { install_path: PathBuf, /// The members of the workspace. packages: WorkspaceMembers, + /// The workspace members that are required by other members. + required_members: BTreeSet, /// The sources table from the workspace `pyproject.toml`. /// /// This table is overridden by the project sources. @@ -260,6 +262,7 @@ impl Workspace { pyproject_toml: PyProjectToml, ) -> Option { let mut packages = self.packages; + let member = Arc::make_mut(&mut packages).get_mut(package_name)?; if member.root == self.install_path { @@ -279,17 +282,33 @@ impl Workspace { // Set the `pyproject.toml` for the member. 
member.pyproject_toml = pyproject_toml; + // Recompute required_members with the updated data + let required_members = Self::collect_required_members( + &packages, + &workspace_sources, + &workspace_pyproject_toml, + ); + Some(Self { pyproject_toml: workspace_pyproject_toml, sources: workspace_sources, packages, + required_members, ..self }) } else { // Set the `pyproject.toml` for the member. member.pyproject_toml = pyproject_toml; - Some(Self { packages, ..self }) + // Recompute required_members with the updated member data + let required_members = + Self::collect_required_members(&packages, &self.sources, &self.pyproject_toml); + + Some(Self { + packages, + required_members, + ..self + }) } } @@ -303,7 +322,7 @@ impl Workspace { /// Returns the set of all workspace members. pub fn members_requirements(&self) -> impl Iterator + '_ { - self.packages.values().filter_map(|member| { + self.packages.iter().filter_map(|(name, member)| { let url = VerbatimUrl::from_absolute_path(&member.root) .expect("path is valid URL") .with_given(member.root.to_string_lossy()); @@ -312,7 +331,10 @@ impl Workspace { extras: Box::new([]), groups: Box::new([]), marker: MarkerTree::TRUE, - source: if member.pyproject_toml.is_package() { + source: if member + .pyproject_toml() + .is_package(!self.is_required_member(name)) + { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), editable: Some(true), @@ -332,9 +354,65 @@ impl Workspace { }) } + /// The workspace members that are required my another member of the workspace. + pub fn required_members(&self) -> &BTreeSet { + &self.required_members + } + + /// Compute the workspace members that are required by another member of the workspace. + /// + /// N.B. this checks if a workspace member is required by inspecting `tool.uv.source` entries, + /// but does not actually check if the source is _used_, which could result in false positives + /// but is easier to compute. 
+ fn collect_required_members( + packages: &BTreeMap, + sources: &BTreeMap, + pyproject_toml: &PyProjectToml, + ) -> BTreeSet { + sources + .iter() + .filter(|(name, _)| { + pyproject_toml + .project + .as_ref() + .is_none_or(|project| project.name != **name) + }) + .chain( + packages + .iter() + .filter_map(|(name, member)| { + member + .pyproject_toml + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.sources.as_ref()) + .map(ToolUvSources::inner) + .map(move |sources| { + sources + .iter() + .filter(move |(source_name, _)| name != *source_name) + }) + }) + .flatten(), + ) + .filter_map(|(package, sources)| { + sources + .iter() + .any(|source| matches!(source, Source::Workspace { .. })) + .then_some(package.clone()) + }) + .collect() + } + + /// Whether a given workspace member is required by another member. + pub fn is_required_member(&self, name: &PackageName) -> bool { + self.required_members().contains(name) + } + /// Returns the set of all workspace member dependency groups. 
pub fn group_requirements(&self) -> impl Iterator + '_ { - self.packages.values().filter_map(|member| { + self.packages.iter().filter_map(|(name, member)| { let url = VerbatimUrl::from_absolute_path(&member.root) .expect("path is valid URL") .with_given(member.root.to_string_lossy()); @@ -368,7 +446,10 @@ impl Workspace { extras: Box::new([]), groups: groups.into_boxed_slice(), marker: MarkerTree::TRUE, - source: if member.pyproject_toml.is_package() { + source: if member + .pyproject_toml() + .is_package(!self.is_required_member(name)) + { RequirementSource::Directory { install_path: member.root.clone().into_boxed_path(), editable: Some(true), @@ -746,9 +827,16 @@ impl Workspace { .and_then(|uv| uv.index) .unwrap_or_default(); + let required_members = Self::collect_required_members( + &workspace_members, + &workspace_sources, + &workspace_pyproject_toml, + ); + Ok(Workspace { install_path: workspace_root, packages: workspace_members, + required_members, sources: workspace_sources, indexes: workspace_indexes, pyproject_toml: workspace_pyproject_toml, @@ -1232,15 +1320,23 @@ impl ProjectWorkspace { project.name.clone(), current_project, )])); + let workspace_sources = BTreeMap::default(); + let required_members = Workspace::collect_required_members( + ¤t_project_as_members, + &workspace_sources, + project_pyproject_toml, + ); + return Ok(Self { project_root: project_path.clone(), project_name: project.name.clone(), workspace: Workspace { install_path: project_path.clone(), packages: current_project_as_members, + required_members, // There may be package sources, but we don't need to duplicate them into the // workspace sources. 
- sources: BTreeMap::default(), + sources: workspace_sources, indexes: Vec::default(), pyproject_toml: project_pyproject_toml.clone(), }, @@ -1692,6 +1788,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -1745,6 +1842,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -1825,6 +1923,10 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [ + "bird-feeder", + "seeds" + ], "sources": { "bird-feeder": [ { @@ -1946,6 +2048,10 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [ + "bird-feeder", + "seeds" + ], "sources": {}, "indexes": [], "pyproject_toml": { @@ -2013,6 +2119,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -2147,6 +2254,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -2254,6 +2362,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -2375,6 +2484,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { @@ -2470,6 +2580,7 @@ mod tests { "pyproject_toml": "[PYPROJECT_TOML]" } }, + "required_members": [], "sources": {}, "indexes": [], "pyproject_toml": { diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs index fd6ed73d7..a830f7aef 100644 --- a/crates/uv/src/commands/build_frontend.rs +++ b/crates/uv/src/commands/build_frontend.rs @@ -263,7 +263,7 @@ async fn build_impl( .get(package) .ok_or_else(|| anyhow::anyhow!("Package `{package}` not found in workspace"))?; - if !package.pyproject_toml().is_package() { + if 
!package.pyproject_toml().is_package(true) { let name = &package.project().name; let pyproject_toml = package.root().join("pyproject.toml"); return Err(anyhow::anyhow!( @@ -300,7 +300,7 @@ async fn build_impl( let packages: Vec<_> = workspace .packages() .values() - .filter(|package| package.pyproject_toml().is_package()) + .filter(|package| package.pyproject_toml().is_package(true)) .map(|package| AnnotatedSource { source: Source::Directory(Cow::Borrowed(package.root())), package: Some(package.project().name.clone()), diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 833e59a13..e23bd97c2 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -444,6 +444,7 @@ async fn do_lock( // Collect the requirements, etc. let members = target.members(); let packages = target.packages(); + let required_members = target.required_members(); let requirements = target.requirements(); let overrides = target.overrides(); let constraints = target.constraints(); @@ -693,6 +694,7 @@ async fn do_lock( target.install_path(), packages, &members, + required_members, &requirements, &dependency_groups, &constraints, @@ -906,6 +908,7 @@ impl ValidatedLock { install_path: &Path, packages: &BTreeMap, members: &[PackageName], + required_members: &BTreeSet, requirements: &[Requirement], dependency_groups: &BTreeMap>, constraints: &[Requirement], @@ -1117,6 +1120,7 @@ impl ValidatedLock { install_path, packages, members, + required_members, requirements, constraints, overrides, diff --git a/crates/uv/src/commands/project/lock_target.rs b/crates/uv/src/commands/project/lock_target.rs index 4618b3b84..55a726bf4 100644 --- a/crates/uv/src/commands/project/lock_target.rs +++ b/crates/uv/src/commands/project/lock_target.rs @@ -1,4 +1,4 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use std::path::{Path, PathBuf}; use itertools::Either; @@ -154,6 +154,18 @@ impl<'lock> 
LockTarget<'lock> { } } + /// Return the set of required workspace members, i.e., those that are required by other + /// members. + pub(crate) fn required_members(self) -> &'lock BTreeSet { + match self { + Self::Workspace(workspace) => workspace.required_members(), + Self::Script(_) => { + static EMPTY: BTreeSet = BTreeSet::new(); + &EMPTY + } + } + } + /// Returns the set of supported environments for the [`LockTarget`]. pub(crate) fn environments(self) -> Option<&'lock SupportedEnvironments> { match self { diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 40aa1b352..8d2dd9629 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -117,7 +117,7 @@ pub(crate) async fn sync( // TODO(lucab): improve warning content // if project.workspace().pyproject_toml().has_scripts() - && !project.workspace().pyproject_toml().is_package() + && !project.workspace().pyproject_toml().is_package(true) { warn_user!( "Skipping installation of entry points (`project.scripts`) because this project is not packaged; to install entry points, set `tool.uv.package = true` or define a `build-system`" diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index 70b8d6e50..aa494435c 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -10362,7 +10362,7 @@ fn add_self() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "anyio" version = "0.1.0" @@ -10377,7 +10377,7 @@ fn add_self() -> Result<()> { [tool.uv.sources] anyio = { workspace = true } - "### + "# ); }); @@ -10398,7 +10398,7 @@ fn add_self() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "anyio" version = "0.1.0" @@ -10418,7 +10418,7 @@ fn add_self() -> Result<()> { dev = [ "anyio[types]", ] - "### + "# ); }); @@ -13173,7 +13173,9 @@ fn 
add_path_with_existing_workspace() -> Result<()> { ----- stderr ----- Added `dep` to workspace members Resolved 3 packages in [TIME] - Audited in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dep==0.1.0 (from file://[TEMP_DIR]/dep) "); let pyproject_toml = context.read("pyproject.toml"); @@ -13250,7 +13252,9 @@ fn add_path_with_workspace() -> Result<()> { ----- stderr ----- Added `dep` to workspace members Resolved 2 packages in [TIME] - Audited in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dep==0.1.0 (from file://[TEMP_DIR]/dep) "); let pyproject_toml = context.read("pyproject.toml"); @@ -13316,7 +13320,9 @@ fn add_path_within_workspace_defaults_to_workspace() -> Result<()> { ----- stderr ----- Added `dep` to workspace members Resolved 2 packages in [TIME] - Audited in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dep==0.1.0 (from file://[TEMP_DIR]/dep) "); let pyproject_toml = context.read("pyproject.toml"); diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 75d81b4c0..ff9b711b7 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -12064,10 +12064,6 @@ fn lock_remove_member() -> Result<()> { requires-python = ">=3.12" dependencies = ["leaf"] - [build-system] - requires = ["setuptools>=42"] - build-backend = "setuptools.build_meta" - [tool.uv.workspace] members = ["leaf"] @@ -12130,7 +12126,7 @@ fn lock_remove_member() -> Result<()> { [[package]] name = "leaf" version = "0.1.0" - source = { virtual = "leaf" } + source = { editable = "leaf" } dependencies = [ { name = "anyio" }, ] @@ -12141,13 +12137,13 @@ fn lock_remove_member() -> Result<()> { [[package]] name = "project" version = "0.1.0" - source = { editable = "." } + source = { virtual = "." 
} dependencies = [ { name = "leaf" }, ] [package.metadata] - requires-dist = [{ name = "leaf", virtual = "leaf" }] + requires-dist = [{ name = "leaf", editable = "leaf" }] [[package]] name = "sniffio" @@ -12162,16 +12158,124 @@ fn lock_remove_member() -> Result<()> { }); // Re-run with `--locked`. - uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Resolved 5 packages in [TIME] - "###); + "); - // Remove the member. + // Remove the member as a dependency (retain it as a workspace member) + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = ["leaf"] + + [tool.uv.sources] + leaf = { workspace = true } + "#, + )?; + + // Re-run with `--locked`. This should fail. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); + + // Re-run without `--locked`. 
+ uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + members = [ + "leaf", + "project", + ] + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi.org/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "leaf" + version = "0.1.0" + source = { 
editable = "leaf" } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", specifier = ">3" }] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + ); + }); + + // Remove the member entirely pyproject_toml.write_str( r#" [project] @@ -12238,7 +12342,7 @@ fn lock_remove_member() -> Result<()> { /// This test would fail if we didn't write the list of workspace members to the lockfile, since /// we wouldn't be able to determine that a new member was added. #[test] -fn lock_add_member() -> Result<()> { +fn lock_add_member_with_build_system() -> Result<()> { let context = TestContext::new("3.12"); // Create a workspace, but don't add the member. @@ -12449,6 +12553,339 @@ fn lock_add_member() -> Result<()> { Ok(()) } +#[test] +fn lock_add_member_without_build_system() -> Result<()> { + let context = TestContext::new("3.12"); + + // Create a workspace, but don't add the member. 
+ let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = [] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "###); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + "# + ); + }); + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "###); + + // Create a workspace member. + let leaf = context.temp_dir.child("leaf"); + leaf.child("pyproject.toml").write_str( + r#" + [project] + name = "leaf" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio>3"] + "#, + )?; + + // Add the member to the workspace, but not as a dependency of the root. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv.workspace] + members = ["leaf"] + "#, + )?; + + // Re-run with `--locked`. This should fail. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); + + // Re-run with `--offline`. This should also fail, during the resolve phase. 
+ uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because anyio was not found in the cache and leaf depends on anyio>3, we can conclude that leaf's requirements are unsatisfiable. + And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable. + + hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache. + "###); + + // Re-run without `--locked`. + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + Added anyio v4.3.0 + Added idna v3.6 + Added leaf v0.1.0 + Added sniffio v1.3.1 + "###); + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "###); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + members = [ + "leaf", + "project", + ] + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi.org/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "leaf" + version = "0.1.0" + source = { virtual = "leaf" } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", specifier = ">3" }] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." 
} + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + ); + }); + + // Add the member to the workspace, as a dependency of the root. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["leaf"] + + [tool.uv.workspace] + members = ["leaf"] + + [tool.uv.sources] + leaf = { workspace = true } + "#, + )?; + + // Re-run with `--locked`. This should fail. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. + "); + + // Re-run without `--locked`. + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + // Re-run with `--locked`. 
+ uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + + // It should change from a virtual to an editable source + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + members = [ + "leaf", + "project", + ] + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi.org/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" 
}, + ] + + [[package]] + name = "leaf" + version = "0.1.0" + source = { editable = "leaf" } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", specifier = ">3" }] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "leaf" }, + ] + + [package.metadata] + requires-dist = [{ name = "leaf", editable = "leaf" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + ); + }); + + Ok(()) +} + /// Lock a `pyproject.toml`, then add a dependency that's already included in the resolution. /// In theory, we shouldn't need to re-resolve, but based on our current strategy, we don't accept /// the existing lockfile. diff --git a/crates/uv/tests/it/lock_conflict.rs b/crates/uv/tests/it/lock_conflict.rs index bf1bc1eac..d67736c88 100644 --- a/crates/uv/tests/it/lock_conflict.rs +++ b/crates/uv/tests/it/lock_conflict.rs @@ -1094,18 +1094,19 @@ fn extra_unconditional() -> Result<()> { "###); // This is fine because we are only enabling one // extra, and thus, there is no conflict. 
- uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 4 packages in [TIME] + anyio==4.1.0 + idna==3.6 + + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1) + sniffio==1.3.1 - "###); + "); // And same thing for the other extra. root_pyproject_toml.write_str( @@ -1215,18 +1216,19 @@ fn extra_unconditional_non_conflicting() -> Result<()> { // `uv sync` wasn't correctly propagating extras in a way // that would satisfy the conflict markers that got added // to the `proxy1[extra1]` dependency. - uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 4 packages in [TIME] + anyio==4.1.0 + idna==3.6 + + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1) + sniffio==1.3.1 - "###); + "); Ok(()) } @@ -1301,16 +1303,17 @@ fn extra_unconditional_in_optional() -> Result<()> { "###); // This should install `sortedcontainers==2.3.0`. - uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x1"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x1"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Prepared 1 package in [TIME] - Installed 1 package in [TIME] + Prepared 2 packages in [TIME] + Installed 2 packages in [TIME] + + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1) + sortedcontainers==2.3.0 - "###); + "); // This should install `sortedcontainers==2.4.0`. 
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=x2"), @r###" @@ -4460,19 +4463,20 @@ conflicts = [ error: Extra `x2` is not defined in the project's `optional-dependencies` table "###); - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Resolved 7 packages in [TIME] - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 4 packages in [TIME] + anyio==4.3.0 + idna==3.6 + + proxy1==0.1.0 (from file://[TEMP_DIR]/proxy1) + sniffio==1.3.1 - "###); + "); let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock")).unwrap(); insta::with_settings!({ @@ -4558,14 +4562,14 @@ conflicts = [ requires-dist = [ { name = "anyio", specifier = ">=4" }, { name = "idna", marker = "extra == 'x1'", specifier = "==3.6" }, - { name = "proxy1", virtual = "proxy1" }, + { name = "proxy1", editable = "proxy1" }, ] provides-extras = ["x1"] [[package]] name = "proxy1" version = "0.1.0" - source = { virtual = "proxy1" } + source = { editable = "proxy1" } [package.optional-dependencies] x2 = [ diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index ac3549874..69da12fd6 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -15772,18 +15772,18 @@ fn project_and_group_workspace_inherit() -> Result<()> { ----- stdout ----- # This file was autogenerated by uv via the following command: # uv pip compile --cache-dir [CACHE_DIR] --group packages/mysubproject/pyproject.toml:foo + -e file://[TEMP_DIR]/packages/pytest + # via mysubproject (packages/mysubproject/pyproject.toml:foo) + -e file://[TEMP_DIR]/packages/sniffio + # via + # mysubproject (packages/mysubproject/pyproject.toml:foo) + # anyio anyio==4.3.0 # via mysubproject (packages/mysubproject/pyproject.toml:foo) idna==3.6 # via anyio iniconfig==2.0.0 # via mysubproject 
(packages/mysubproject/pyproject.toml:foo) - pytest @ file://[TEMP_DIR]/packages/pytest - # via mysubproject (packages/mysubproject/pyproject.toml:foo) - sniffio @ file://[TEMP_DIR]/packages/sniffio - # via - # mysubproject (packages/mysubproject/pyproject.toml:foo) - # anyio ----- stderr ----- Resolved 5 packages in [TIME] diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index bb3546e22..5a8d79447 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -3565,6 +3565,101 @@ fn sync_ignore_extras_check_when_no_provides_extras() -> Result<()> { Ok(()) } +#[test] +fn sync_workspace_members_with_transitive_dependencies() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [tool.uv.workspace] + members = [ + "packages/*", + ] + "#, + )?; + + let packages = context.temp_dir.child("packages"); + packages.create_dir_all()?; + + // Create three workspace members with transitive dependency from + // pkg-c -> pkg-b -> pkg-a + let pkg_a = packages.child("pkg-a"); + pkg_a.create_dir_all()?; + let pkg_a_pyproject_toml = pkg_a.child("pyproject.toml"); + pkg_a_pyproject_toml.write_str( + r#" + [project] + name = "pkg-a" + version = "0.0.1" + requires-python = ">=3.12" + dependencies = ["anyio"] + "#, + )?; + + let pkg_b = packages.child("pkg-b"); + pkg_b.create_dir_all()?; + let pkg_b_pyproject_toml = pkg_b.child("pyproject.toml"); + pkg_b_pyproject_toml.write_str( + r#" + [project] + name = "pkg-b" + version = "0.0.1" + requires-python = ">=3.12" + dependencies = ["pkg-a"] + + [tool.uv.sources] + pkg-a = { workspace = true } + "#, + )?; + + let pkg_c = packages.child("pkg-c"); + pkg_c.create_dir_all()?; + let pkg_c_pyproject_toml = pkg_c.child("pyproject.toml"); + pkg_c_pyproject_toml.write_str( + r#" + [project] + name = "pkg-c" + version = "0.0.1" + requires-python = ">=3.12" + dependencies = ["pkg-b"] + + [tool.uv.sources] + pkg-b 
= { workspace = true } + "#, + )?; + + // Syncing should build the two transitive dependencies pkg-a and pkg-b, + // but not pkg-c, which is not a dependency. + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 6 packages in [TIME] + Prepared 5 packages in [TIME] + Installed 5 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + pkg-a==0.0.1 (from file://[TEMP_DIR]/packages/pkg-a) + + pkg-b==0.0.1 (from file://[TEMP_DIR]/packages/pkg-b) + + sniffio==1.3.1 + "); + + // The lockfile should be valid. + uv_snapshot!(context.filters(), context.lock().arg("--check"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 6 packages in [TIME] + "); + + Ok(()) +} + #[test] fn sync_non_existent_extra_workspace_member() -> Result<()> { let context = TestContext::new("3.12"); @@ -3626,9 +3721,10 @@ fn sync_non_existent_extra_workspace_member() -> Result<()> { ----- stderr ----- Resolved 5 packages in [TIME] - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 4 packages in [TIME] + anyio==4.3.0 + + child==0.1.0 (from file://[TEMP_DIR]/child) + idna==3.6 + sniffio==1.3.1 "); diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md index bf11e7174..52a71fd04 100644 --- a/docs/concepts/projects/dependencies.md +++ b/docs/concepts/projects/dependencies.md @@ -808,9 +808,9 @@ $ uv add --no-editable ./path/foo uv allows dependencies to be "virtual", in which the dependency itself is not installed as a [package](./config.md#project-packaging), but its dependencies are. -By default, only workspace members without build systems declared are virtual. +By default, dependencies are never virtual. 
-A dependency with a [`path` source](#path) is not virtual unless it explicitly sets +A dependency with a [`path` source](#path) can be virtual if it explicitly sets [`tool.uv.package = false`](../../reference/settings.md#package). Unlike working _in_ the dependent project with uv, the package will be built even if a [build system](./config.md#build-systems) is not declared. @@ -825,8 +825,8 @@ dependencies = ["bar"] bar = { path = "../projects/bar", package = false } ``` -Similarly, if a dependency sets `tool.uv.package = false`, it can be overridden by declaring -`package = true` on the source: +If a dependency sets `tool.uv.package = false`, it can be overridden by declaring `package = true` +on the source: ```toml title="pyproject.toml" [project] @@ -836,6 +836,52 @@ dependencies = ["bar"] bar = { path = "../projects/bar", package = true } ``` +Similarly, a dependency with a [`workspace` source](#workspace-member) can be virtual if it +explicitly sets [`tool.uv.package = false`](../../reference/settings.md#package). The workspace +member will be built even if a [build system](./config.md#build-systems) is not declared. + +Workspace members that are _not_ dependencies can be virtual by default, e.g., if the parent +`pyproject.toml` is: + +```toml title="pyproject.toml" +[project] +name = "parent" +version = "1.0.0" +dependencies = [] + +[tool.uv.workspace] +members = ["child"] +``` + +And the child `pyproject.toml` excluded a build system: + +```toml title="pyproject.toml" +[project] +name = "child" +version = "1.0.0" +dependencies = ["anyio"] +``` + +Then the `child` workspace member would not be installed, but the transitive dependency `anyio` +would be. 
+ +In contrast, if the parent declared a dependency on `child`: + +```toml title="pyproject.toml" +[project] +name = "parent" +version = "1.0.0" +dependencies = ["child"] + +[tool.uv.sources] +child = { workspace = true } + +[tool.uv.workspace] +members = ["child"] +``` + +Then `child` would be built and installed. + ## Dependency specifiers uv uses standard From 5b716c4e50f2a4e829de3f474125cdac8de11c79 Mon Sep 17 00:00:00 2001 From: konsti Date: Thu, 17 Jul 2025 22:37:35 +0200 Subject: [PATCH 076/130] Add missing trailing newline to outdated error (#14689) Unlike the other branch in match, which uses a fully formatted error, we need to print the newline ourselves. Before (top) and after (bottom): image --- crates/uv/src/commands/diagnostics.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index 02412d683..f24aa3406 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -128,7 +128,7 @@ impl OperationDiagnostic { None } pip::operations::Error::OutdatedEnvironment => { - anstream::eprint!("{}", err); + anstream::eprintln!("{}", err); None } err => Some(err), From ac35377132954c9ee0fd60d5a464dab4490966c0 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 15:52:31 -0500 Subject: [PATCH 077/130] Fix rendering of `uv venv --clear` hint in bash (#14691) Closes https://github.com/astral-sh/uv/issues/14688 --- crates/uv-console/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/uv-console/src/lib.rs b/crates/uv-console/src/lib.rs index 24c5eea16..1da7efdf1 100644 --- a/crates/uv-console/src/lib.rs +++ b/crates/uv-console/src/lib.rs @@ -84,6 +84,9 @@ fn confirm_inner( if hint.is_some() { term.clear_last_lines(2)?; + // It's not clear why we need to clear to the end of the screen here, but it fixes lingering + // display of the hint on `bash` (the issue did not reproduce on `zsh`). 
+ term.clear_to_end_of_screen()?; } else { term.clear_line()?; } From 1a339b76e841188a04af9dbd1ad87136cd301122 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 17:07:48 -0500 Subject: [PATCH 078/130] Add release notes and bump version for 0.8.0 (#14690) [Rendered](https://github.com/astral-sh/uv/blob/zb/release-notes/CHANGELOG.md) --- CHANGELOG.md | 197 +++++++++++++++++--- Cargo.lock | 6 +- crates/uv-build-backend/src/lib.rs | 2 +- crates/uv-build/Cargo.toml | 2 +- crates/uv-build/pyproject.toml | 2 +- crates/uv-version/Cargo.toml | 2 +- crates/uv/Cargo.toml | 2 +- docs/concepts/build-backend.md | 2 +- docs/getting-started/installation.md | 4 +- docs/guides/integration/aws-lambda.md | 4 +- docs/guides/integration/docker.md | 12 +- docs/guides/integration/github.md | 2 +- docs/guides/integration/pre-commit.md | 10 +- scripts/packages/built-by-uv/pyproject.toml | 2 +- 14 files changed, 199 insertions(+), 50 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 87cf0c9e8..351edc326 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,155 @@ +## 0.8.0 + +Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.5.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes. + +This release also includes the stabilization of a couple `uv python install` features, which have been available under preview since late last year. + +### Breaking changes + +- **Install Python executables into a directory on the `PATH` ([#14626](https://github.com/astral-sh/uv/pull/14626))** + + `uv python install` now installs a versioned Python executable (e.g., `python3.13`) into a directory on the `PATH` (e.g., `~/.local/bin`) by default. 
This behavior has been available under the `--preview` flag since [Oct 2024](https://github.com/astral-sh/uv/pull/8458). This change should not be breaking unless it shadows a Python executable elsewhere on the `PATH`. + + To install unversioned executables, i.e., `python3` and `python`, use the `--default` flag. The `--default` flag has also been in preview, but is not stabilized in this release. + + Note that these executables point to the base Python installation and only include the standard library. That means they will not include dependencies from your current project (use `uv run python` instead) and you cannot install packages into their environment (use `uvx --with python` instead). + + As with tool installation, the target directory respects common variables like `XDG_BIN_HOME` and can be overridden with a `UV_PYTHON_BIN_DIR` variable. + + You can opt out of this behavior with `uv python install --no-bin` or `UV_PYTHON_INSTALL_BIN=0`. + + See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details. + +- **Register Python versions with the Windows registry ([#14625](https://github.com/astral-sh/uv/pull/14625))** + + `uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt in. + + You can opt out of this behavior with `uv python install --no-registry` or `UV_PYTHON_INSTALL_REGISTRY=0`. 
+ +- **Prompt before removing an existing directory in `uv venv` ([#14309](https://github.com/astral-sh/uv/pull/14309))** + + Previously, `uv venv` would remove an existing virtual environment without confirmation. While this is consistent with the behavior of project commands (e.g., `uv sync`), it's surprising to users that are using imperative workflows (i.e., `uv pip`). Now, `uv venv` will prompt for confirmation before removing an existing virtual environment. **If not in an interactive context, uv will still remove the virtual environment for backwards compatibility. However, this behavior is likely to change in a future release.** + + The behavior for other commands (e.g., `uv sync`) is unchanged. + + You can opt out of this behavior by setting `UV_VENV_CLEAR=1` or passing the `--clear` flag. + +- **Validate that discovered interpreters meet the Python preference ([#7934](https://github.com/astral-sh/uv/pull/7934))** + + uv allows opting out of its managed Python versions with the `--no-managed-python` and `python-preference` options. + + Previously, uv would not enforce this option for Python interpreters discovered on the `PATH`. For example, if a symlink to a managed Python interpreter was created, uv would allow it to be used even if `--no-managed-python` was provided. Now, uv ignores Python interpreters that do not match the Python preference _unless_ they are in an active virtual environment or are explicitly requested, e.g., with `--python /path/to/python3.13`. + + Similarly, uv would previously not invalidate existing project environments if they did not match the Python preference. Now, uv will invalidate and recreate project environments when the Python preference changes. + + You can opt out of this behavior by providing the explicit path to the Python interpreter providing `--managed-python` / `--no-managed-python` matching the interpreter you want. 
+ +- **Install dependencies without build systems when they are `path` sources ([#14413](https://github.com/astral-sh/uv/pull/14413))** + + When working on a project, uv uses the [presence of a build system](https://docs.astral.sh/uv/concepts/projects/config/#build-systems) to determine if it should be built and installed into the environment. However, when a project is a dependency of another project, it can be surprising for the dependency to be missing from the environment. + + Previously, uv would not build and install dependencies with [`path` sources](https://docs.astral.sh/uv/concepts/projects/dependencies/#path) unless they declared a build system or set `tool.uv.package = true`. Now, dependencies with `path` sources are built and installed regardless of the presence of a build system. If a build system is not present, the `setuptools.build_meta:__legacy__ ` backend will be used (per [PEP 517](https://peps.python.org/pep-0517/#source-trees)). + + You can opt out of this behavior by setting `package = false` in the source declaration, e.g.: + + ```toml + [tool.uv.sources] + foo = { path = "./foo", package = false } + ``` + + Or, by setting `tool.uv.package = false` in the dependent `pyproject.toml`. + + See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details. + +- **Install dependencies without build systems when they are workspace members ([#14663](https://github.com/astral-sh/uv/pull/14663))** + + As described above for dependencies with `path` sources, uv previously would not build and install workspace members that did not declare a build system. Now, uv will build and install workspace members that are a dependency of _another_ workspace member regardless of the presence of a build system. 
The behavior is unchanged for workspace members that are not included in the `project.dependencies`, `project.optional-dependencies`, or `dependency-groups` tables of another workspace member. + + You can opt out of this behavior by setting `tool.uv.package = false` in the workspace member's `pyproject.toml`. + + See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details. + +- **Bump `--python-platform linux` to `manylinux_2_28` ([#14300](https://github.com/astral-sh/uv/pull/14300))** + + uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets. + + Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2017 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330). + + This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version. + + You can opt out of this behavior by using `--python-platform x86_64-manylinux_2_17` instead. + +- **Remove `uv version` fallback ([#14161](https://github.com/astral-sh/uv/pull/14161))** + + In [Apr 2025](https://github.com/astral-sh/uv/pull/12349), uv changed the `uv version` command to an interface for viewing and updating the version of the current project. However, when outside a project, `uv version` would continue to display uv's version for backwards compatibility. Now, when used outside of a project, `uv version` will fail. + + You cannot opt out of this behavior. Use `uv self version` instead. 
+ +- **Require `--global` for removal of the global Python pin ([#14169](https://github.com/astral-sh/uv/pull/14169))** + + Previously, `uv python pin --rm` would allow you to remove the global Python pin without opt in. Now, uv requires the `--global` flag to remove the global Python pin. + + You cannot opt out of this behavior. Use the `--global` flag instead. + +- **Support conflicting editable settings across groups ([#14197](https://github.com/astral-sh/uv/pull/14197))** + + Previously, uv would always treat a package as editable if any requirement requested it as editable. However, this prevented users from declaring `path` sources that toggled the `editable` setting across dependency groups. Now, uv allows declaring different `editable` values for conflicting groups. However, if a project includes a path dependency twice, once with `editable = true` and once without any editable annotation, those are now considered conflicting, and uv will exit with an error. + + You cannot opt out of this behavior. Use consistent `editable` settings or [mark groups as conflicting](https://docs.astral.sh/uv/concepts/projects/config/#conflicting-dependencies). + +- **Make `uv_build` the default build backend in `uv init` ([#14661](https://github.com/astral-sh/uv/pull/14661))** + + The uv build backend (`uv_build`) was [stabilized in uv 0.7.19](https://github.com/astral-sh/uv/releases/tag/0.7.19). Now, it is the default build backend for `uv init --package` and `uv init --lib`. Previously, `hatchling` was the default build backend. A build backend is still not used without opt-in in `uv init`, but we expect to change this in a future release. + + You can opt out of this behavior with `uv init --build-backend hatchling`. 
+ +- **Set default `UV_TOOL_BIN_DIR` on Docker images ([#13391](https://github.com/astral-sh/uv/pull/13391))** + + Previously, `UV_TOOL_BIN_DIR` was not set in Docker images which meant that `uv tool install` did not install tools into a directory on the `PATH` without additional configuration. Now, `UV_TOOL_BIN_DIR` is set to `/usr/local/bin` in all Docker derived images. + + When the default image user is overridden (e.g. `USER `) with a less privileged user, this may cause `uv tool install` to fail. + + You can opt out of this behavior by setting an alternative `UV_TOOL_BIN_DIR`. + +- **Update `--check` to return an exit code of 1 ([#14167](https://github.com/astral-sh/uv/pull/14167))** + + uv uses an exit code of 1 to indicate a "successful failure" and an exit code of 2 to indicate an "error". + + Previously, `uv lock --check` and `uv sync --check` would exit with a code of 2 when the lockfile or environment were outdated. Now, uv will exit with a code of 1. + + You cannot opt out of this behavior. + +- **Use an ephemeral environment for `uv run --with` invocations ([#14447](https://github.com/astral-sh/uv/pull/14447))** + + When using `uv run --with`, uv layers the requirements requested using `--with` into another virtual environment and caches it. Previously, uv would invoke the Python interpreter in this layered environment. However, this allows poisoning the cached environment and introduces race conditions for concurrent invocations. Now, uv will layer _another_ empty virtual environment on top of the cached environment and invoke the Python interpreter there. This should only cause breakage in cases where the environment is being inspected at runtime. + + You cannot opt out of this behavior. + +- **Restructure the `uv venv` command output and exit codes ([#14546](https://github.com/astral-sh/uv/pull/14546))** + + Previously, uv used `miette` to format the `uv venv` output. However, this was inconsistent with most of the uv CLI. 
Now, the output is a little different and the exit code has switched from 1 to 2 for some error cases. + + You cannot opt out of this behavior. + +- **Default to `--workspace` when adding subdirectories ([#14529](https://github.com/astral-sh/uv/pull/14529))** + + When using `uv add` to add a subdirectory in a workspace, uv now defaults to adding the target as a workspace member. + + You can opt out of this behavior by providing `--no-workspace`. + +- **Add missing validations for disallowed `uv.toml` fields ([#14322](https://github.com/astral-sh/uv/pull/14322))** + + uv does not allow some settings in the `uv.toml`. Previously, some settings were silently ignored when present in the `uv.toml`. Now, uv will error. + + You cannot opt out of this behavior. Use `--no-config` or remove the invalid settings. + +### Configuration + +- Add support for toggling Python bin and registry install options via env vars ([#14662](https://github.com/astral-sh/uv/pull/14662)) + ## 0.7.22 ### Python @@ -153,7 +302,7 @@ See the [python-build-standalone release](https://github.com/astral-sh/python-bu ### Python - Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14 - + These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows. However, they can be requested with `cpython--windows-aarch64`. @@ -633,11 +782,11 @@ This release contains various changes that improve correctness and user experien ### Breaking changes - **Update `uv version` to display and update project versions ([#12349](https://github.com/astral-sh/uv/pull/12349))** - + Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the project's version. This interface was [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we decided that transitioning the top-level command was the best option. 
- + Here's a brief example: - + ```console $ uv init example Initialized project `example` at `./example` @@ -649,72 +798,72 @@ This release contains various changes that improve correctness and user experien $ uv version --short 1.0.0 ``` - + If used outside of a project, uv will fallback to showing its own version still: - + ```console $ uv version warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory running `uv self version` for compatibility with old `uv version` command. this fallback will be removed soon, pass `--preview` to make this an error. - + uv 0.7.0 (4433f41c9 2025-04-29) ``` - + As described in the warning, `--preview` can be used to error instead: - + ```console $ uv version --preview error: No `pyproject.toml` found in current directory or any parent directory ``` - + The previous functionality of `uv version` was moved to `uv self version`. - **Avoid fallback to subsequent indexes on authentication failure ([#12805](https://github.com/astral-sh/uv/pull/12805))** - + When using the `first-index` strategy (the default), uv will stop searching indexes for a package once it is found on a single index. Previously, uv considered a package as "missing" from an index during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are represented by an HTTP 404). This behavior was motivated by unusual responses from some package indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will consider an authentication failure as a stop-point when searching for a package across indexes. 
The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.: + + ```toml [[tool.uv.index]] name = "pytorch" url = "https://download.pytorch.org/whl/cpu" ignore-error-codes = [401, 403] ``` + + Since PyTorch's indexes always return a HTTP 403 for missing packages, uv special-cases indexes on the `pytorch.org` domain to ignore that error code by default. - **Require the command in `uvx <command>` to be available in the Python environment ([#11603](https://github.com/astral-sh/uv/pull/11603))** - + Previously, `uvx` would attempt to execute a command even if it was not provided by a Python package. For example, if we presume `foo` is an empty Python package which provides no command, `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if the `foo` executable is not provided by the requested Python package. This check is not enforced when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of `foo` itself, as this is fairly common for packages which depend on a dedicated package for their command-line interface. - **Use index URL instead of package URL for keyring credential lookups ([#12651](https://github.com/astral-sh/uv/pull/12651))** - + When determining credentials for querying a package URL, uv previously sent the full URL to the `keyring` command. However, some keyring plugins expect to receive the *index URL* (which is usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This behavior matches `pip`. - **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))** - + Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`. However, the `--version` flag is useful for other operations since uv is a package manager.
Consequently, we've removed the `--version` flag from subcommands — it is only available as `uv --version`. - **Omit Python 3.7 downloads from managed versions ([#13022](https://github.com/astral-sh/uv/pull/13022))** - + Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available for download on a subset of platforms. - **Reject non-PEP 751 TOML files in install, compile, and export commands ([#13120](https://github.com/astral-sh/uv/pull/13120), [#13119](https://github.com/astral-sh/uv/pull/13119))** - + Previously, uv treated arbitrary `.toml` files passed to commands (e.g., `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for custom names instead, e.g., `pylock.foo.toml`. - **Ignore arbitrary Python requests in version files ([#12909](https://github.com/astral-sh/uv/pull/12909))** - + uv allows arbitrary strings to be used for Python version requests, in which they are treated as an executable name to search for in the `PATH`. However, using this form of request in `.python-version` files is non-standard and conflicts with `pyenv-virtualenv` which writes environment names to `.python-version` files. In this release, uv will now ignore requests that are arbitrary strings when found in `.python-version` files. - **Error on unknown dependency object specifiers ([12811](https://github.com/astral-sh/uv/pull/12811))** - + The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in: - + ```toml [dependency-groups] foo = ["pyparsing"] bar = [{set-phasers-to = "stun"}] ``` - + However, the only current spec-compliant object specifier is `include-group`. Previously, uv would ignore unknown object specifiers. Now, uv will error. 
- **Make `--frozen` and `--no-sources` conflicting options ([#12671](https://github.com/astral-sh/uv/pull/12671))** - + Using `--no-sources` always requires a new resolution and `--frozen` will always fail when used with it. Now, this conflict is encoded in the CLI options for clarity. - **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset ([#12907](https://github.com/astral-sh/uv/pull/12907), [#12905](https://github.com/astral-sh/uv/pull/12905))** - + Previously, these variables were treated as set to the current working directory when set to an empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other environment variables which configure directories. ### Enhancements diff --git a/Cargo.lock b/Cargo.lock index 0900699cb..78429b08f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4633,7 +4633,7 @@ dependencies = [ [[package]] name = "uv" -version = "0.7.22" +version = "0.8.0" dependencies = [ "anstream", "anyhow", @@ -4799,7 +4799,7 @@ dependencies = [ [[package]] name = "uv-build" -version = "0.7.22" +version = "0.8.0" dependencies = [ "anyhow", "uv-build-backend", @@ -5992,7 +5992,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.7.22" +version = "0.8.0" [[package]] name = "uv-virtualenv" diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs index 2ec11aeeb..8add8dda3 100644 --- a/crates/uv-build-backend/src/lib.rs +++ b/crates/uv-build-backend/src/lib.rs @@ -557,7 +557,7 @@ mod tests { // Check that the source dist is reproducible across platforms. 
assert_snapshot!( format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())), - @"dab46bcc4d66960a11cfdc19604512a8e1a3241a67536f7e962166760e9c575c" + @"9a7f7181c5e69ac14e411a2500fed153a1e6ea41cd5da6f24f226c4cddacf6b7" ); // Check both the files we report and the actual files assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r" diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml index 8014fa445..dcf61a435 100644 --- a/crates/uv-build/Cargo.toml +++ b/crates/uv-build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-build" -version = "0.7.22" +version = "0.8.0" edition.workspace = true rust-version.workspace = true homepage.workspace = true diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml index 1a78d34dc..53bcbf49b 100644 --- a/crates/uv-build/pyproject.toml +++ b/crates/uv-build/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uv-build" -version = "0.7.22" +version = "0.8.0" description = "The uv build backend" authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index e1a424af8..02f940b30 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.7.22" +version = "0.8.0" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index ff389f033..d160cce7b 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.7.22" +version = "0.8.0" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md index d2edf1bad..d29420085 100644 --- a/docs/concepts/build-backend.md +++ b/docs/concepts/build-backend.md @@ -31,7 +31,7 @@ 
To use uv as a build backend in an existing project, add `uv_build` to the ```toml title="pyproject.toml" [build-system] -requires = ["uv_build>=0.7.22,<0.8.0"] +requires = ["uv_build>=0.8.0,<0.9.0"] build-backend = "uv_build" ``` diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index 3e31a5003..5e8165824 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```console - $ curl -LsSf https://astral.sh/uv/0.7.22/install.sh | sh + $ curl -LsSf https://astral.sh/uv/0.8.0/install.sh | sh ``` === "Windows" @@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```pwsh-session - PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.22/install.ps1 | iex" + PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.8.0/install.ps1 | iex" ``` !!! tip diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md index 14224b3fe..d9fc06d29 100644 --- a/docs/guides/integration/aws-lambda.md +++ b/docs/guides/integration/aws-lambda.md @@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th other unnecessary files. ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.22 AS uv +FROM ghcr.io/astral-sh/uv:0.8.0 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder @@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell Finally, we'll update the Dockerfile to include the local library in the deployment package: ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.22 AS uv +FROM ghcr.io/astral-sh/uv:0.8.0 AS uv # First, bundle the dependencies into the task root. 
FROM public.ecr.aws/lambda/python:3.13 AS builder diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index a75228723..0eeaed62d 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -31,8 +31,8 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help The following distroless images are available: - `ghcr.io/astral-sh/uv:latest` -- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.22` -- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch +- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.8.0` +- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.8` (the latest patch version) And the following derived images are available: @@ -75,7 +75,7 @@ And the following derived images are available: As with the distroless image, each derived image is published with uv version tags as `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and -`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.22-alpine`. +`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.8.0-alpine`. In addition, starting with `0.8` each derived image also sets `UV_TOOL_BIN_DIR` to `/usr/local/bin` to allow `uv tool install` to work as expected with the default user. @@ -116,7 +116,7 @@ Note this requires `curl` to be available. In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:0.8.0 /uv /uvx /bin/ ``` !!! tip @@ -134,7 +134,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.22 /uv /uvx /bin/ Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.7.22/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.8.0/install.sh /uv-installer.sh ``` ### Installing a project @@ -560,5 +560,5 @@ Verified OK !!! 
tip These examples use `latest`, but best practice is to verify the attestation for a specific - version tag, e.g., `ghcr.io/astral-sh/uv:0.7.22`, or (even better) the specific image digest, + version tag, e.g., `ghcr.io/astral-sh/uv:0.8.0`, or (even better) the specific image digest, such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`. diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index 956b47660..15d26b280 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -47,7 +47,7 @@ jobs: uses: astral-sh/setup-uv@v6 with: # Install a specific version of uv. - version: "0.7.22" + version: "0.8.0" ``` ## Setting up Python diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index d2598fed8..bbc21ab45 100644 --- a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.22 + rev: 0.8.0 hooks: - id: uv-lock ``` @@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.22 + rev: 0.8.0 hooks: - id: uv-export ``` @@ -41,7 +41,7 @@ To compile requirements files: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.22 + rev: 0.8.0 hooks: # Compile requirements - id: pip-compile @@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.22 + rev: 0.8.0 hooks: # Compile requirements - id: pip-compile @@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. 
- rev: 0.7.22 + rev: 0.8.0 hooks: # Compile requirements - id: pip-compile diff --git a/scripts/packages/built-by-uv/pyproject.toml b/scripts/packages/built-by-uv/pyproject.toml index f9f893485..b1914e071 100644 --- a/scripts/packages/built-by-uv/pyproject.toml +++ b/scripts/packages/built-by-uv/pyproject.toml @@ -24,5 +24,5 @@ data = "assets" headers = "header" [build-system] -requires = ["uv_build>=0.7,<0.8"] +requires = ["uv_build>=0.8,<0.9"] build-backend = "uv_build" From 1f887552f6c630c4736ec05cf834086fe95eb92f Mon Sep 17 00:00:00 2001 From: Geoffrey Thomas Date: Thu, 17 Jul 2025 18:25:03 -0400 Subject: [PATCH 079/130] CHANGELOG: manylinux_2_28 is more like 2019 (#14696) I must have Googled something too fast, sorry. glibc 2.28 came out August 2018, Fedora 29 was the earliest to ship with it in October 2018, Debian 10 shipped with it in July 2019, and CentOS 8 shipped with it in September 2019. --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 351edc326..e3d8f6f17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -78,7 +78,7 @@ This release also includes the stabilization of a couple `uv python install` fea uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets. - Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2017 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330). 
+ Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2019 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330). This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version. From 0b23572941e271086485227f7c2b5c440062660f Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 17:26:47 -0500 Subject: [PATCH 080/130] Bump version to 0.8.0 Somehow this one was missed? --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a079d53b2..1d0a1e713 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "uv" -version = "0.7.22" +version = "0.8.0" description = "An extremely fast Python package and project manager, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" From a6a5e65e0c2e6aa7d56554ab86033b7066865f51 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Thu, 17 Jul 2025 18:11:22 -0500 Subject: [PATCH 081/130] Edits to the 0.8 changelog entry (#14698) --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e3d8f6f17..b80747ed1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ ## 0.8.0 -Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.5.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. 
We expect most users to be able to upgrade without making changes. +Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes. This release also includes the stabilization of a couple `uv python install` features, which have been available under preview since late last year. @@ -25,7 +25,7 @@ This release also includes the stabilization of a couple `uv python install` fea See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details. -- **Register Python versions with the Windows registry ([#14625](https://github.com/astral-sh/uv/pull/14625))** +- **Register Python versions with the Windows Registry ([#14625](https://github.com/astral-sh/uv/pull/14625))** `uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt in. From e724ddc63f14b9378672c16433dbfba534c6cb84 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Thu, 17 Jul 2025 21:27:54 -0400 Subject: [PATCH 082/130] Allow `--config-settings-package` to apply configuration settings at the package level (#14573) ## Summary Closes https://github.com/astral-sh/uv/issues/14564. Closes https://github.com/astral-sh/uv/issues/10940. 
--- crates/uv-bench/benches/uv.rs | 6 +- crates/uv-cli/src/lib.rs | 37 +++- crates/uv-cli/src/options.rs | 32 ++- .../uv-configuration/src/config_settings.rs | 184 ++++++++++++++++++ crates/uv-dispatch/src/lib.rs | 25 ++- .../src/index/built_wheel_index.rs | 42 ++-- crates/uv-distribution/src/source/mod.rs | 51 +++-- crates/uv-installer/src/plan.rs | 11 +- crates/uv-settings/src/combine.rs | 15 +- crates/uv-settings/src/settings.rs | 34 +++- crates/uv-types/src/traits.rs | 7 +- crates/uv/src/commands/build_frontend.rs | 6 +- crates/uv/src/commands/pip/compile.rs | 5 +- crates/uv/src/commands/pip/install.rs | 6 +- crates/uv/src/commands/pip/operations.rs | 4 +- crates/uv/src/commands/pip/sync.rs | 6 +- crates/uv/src/commands/project/add.rs | 1 + crates/uv/src/commands/project/lock.rs | 2 + crates/uv/src/commands/project/mod.rs | 10 + crates/uv/src/commands/project/sync.rs | 3 + crates/uv/src/commands/project/tree.rs | 1 + crates/uv/src/commands/venv.rs | 4 +- crates/uv/src/lib.rs | 3 + crates/uv/src/settings.rs | 22 ++- crates/uv/tests/it/pip_install.rs | 119 ++++++++++- crates/uv/tests/it/show_settings.rs | 108 +++++++++- crates/uv/tests/it/sync.rs | 143 ++++++++++++++ docs/reference/cli.md | 15 ++ docs/reference/settings.md | 54 +++++ uv.schema.json | 29 +++ 30 files changed, 927 insertions(+), 58 deletions(-) diff --git a/crates/uv-bench/benches/uv.rs b/crates/uv-bench/benches/uv.rs index 9bdd7adb9..8380ccd60 100644 --- a/crates/uv-bench/benches/uv.rs +++ b/crates/uv-bench/benches/uv.rs @@ -86,8 +86,8 @@ mod resolver { use uv_cache::Cache; use uv_client::RegistryClient; use uv_configuration::{ - BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, PreviewMode, - SourceStrategy, + BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, + PackageConfigSettings, PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution::DistributionDatabase; @@ -144,6 +144,7 @@ mod resolver { let build_options 
= BuildOptions::default(); let concurrency = Concurrency::default(); let config_settings = ConfigSettings::default(); + let config_settings_package = PackageConfigSettings::default(); let exclude_newer = Some( jiff::civil::date(2024, 9, 1) .to_zoned(jiff::tz::TimeZone::UTC) @@ -184,6 +185,7 @@ mod resolver { state, IndexStrategy::default(), &config_settings, + &config_settings_package, build_isolation, LinkMode::default(), &build_options, diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 9d7cfa6e0..d6560014f 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -10,8 +10,9 @@ use clap::{Args, Parser, Subcommand}; use uv_cache::CacheArgs; use uv_configuration::{ - ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier, - ProjectBuildBackend, TargetTriple, TrustedHost, TrustedPublishing, VersionControlSystem, + ConfigSettingEntry, ConfigSettingPackageEntry, ExportFormat, IndexStrategy, + KeyringProviderType, PackageNameSpecifier, ProjectBuildBackend, TargetTriple, TrustedHost, + TrustedPublishing, VersionControlSystem, }; use uv_distribution_types::{Index, IndexUrl, Origin, PipExtraIndex, PipFindLinks, PipIndex}; use uv_normalize::{ExtraName, GroupName, PackageName, PipGroupName}; @@ -4693,6 +4694,14 @@ pub struct ToolUpgradeArgs { )] pub config_setting: Option>, + /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs. + #[arg( + long, + alias = "config-settings-package", + help_heading = "Build options" + )] + pub config_setting_package: Option>, + /// Disable isolation when building source distributions. /// /// Assumes that build dependencies specified by PEP 518 are already installed. @@ -5484,6 +5493,14 @@ pub struct InstallerArgs { )] pub config_setting: Option>, + /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs. 
+ #[arg( + long, + alias = "config-settings-package", + help_heading = "Build options" + )] + pub config_settings_package: Option>, + /// Disable isolation when building source distributions. /// /// Assumes that build dependencies specified by PEP 518 are already installed. @@ -5671,6 +5688,14 @@ pub struct ResolverArgs { )] pub config_setting: Option>, + /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs. + #[arg( + long, + alias = "config-settings-package", + help_heading = "Build options" + )] + pub config_settings_package: Option>, + /// Disable isolation when building source distributions. /// /// Assumes that build dependencies specified by PEP 518 are already installed. @@ -5860,6 +5885,14 @@ pub struct ResolverInstallerArgs { )] pub config_setting: Option>, + /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs. + #[arg( + long, + alias = "config-settings-package", + help_heading = "Build options" + )] + pub config_settings_package: Option>, + /// Disable isolation when building source distributions. /// /// Assumes that build dependencies specified by PEP 518 are already installed. 
diff --git a/crates/uv-cli/src/options.rs b/crates/uv-cli/src/options.rs index f522022a1..d2e651a19 100644 --- a/crates/uv-cli/src/options.rs +++ b/crates/uv-cli/src/options.rs @@ -1,7 +1,7 @@ use anstream::eprintln; use uv_cache::Refresh; -use uv_configuration::ConfigSettings; +use uv_configuration::{ConfigSettings, PackageConfigSettings}; use uv_resolver::PrereleaseMode; use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions}; use uv_warnings::owo_colors::OwoColorize; @@ -62,6 +62,7 @@ impl From for PipOptions { pre, fork_strategy, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -84,6 +85,11 @@ impl From for PipOptions { }, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), + config_settings_package: config_settings_package.map(|config_settings| { + config_settings + .into_iter() + .collect::() + }), no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, @@ -104,6 +110,7 @@ impl From for PipOptions { index_strategy, keyring_provider, config_setting, + config_settings_package, no_build_isolation, build_isolation, exclude_newer, @@ -120,6 +127,11 @@ impl From for PipOptions { keyring_provider, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), + config_settings_package: config_settings_package.map(|config_settings| { + config_settings + .into_iter() + .collect::() + }), no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), exclude_newer, link_mode, @@ -147,6 +159,7 @@ impl From for PipOptions { pre, fork_strategy, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -173,6 +186,11 @@ impl From for PipOptions { fork_strategy, config_settings: config_setting .map(|config_settings| 
config_settings.into_iter().collect::()), + config_settings_package: config_settings_package.map(|config_settings| { + config_settings + .into_iter() + .collect::() + }), no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, @@ -260,6 +278,7 @@ pub fn resolver_options( pre, fork_strategy, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -321,6 +340,11 @@ pub fn resolver_options( dependency_metadata: None, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), + config_settings_package: config_settings_package.map(|config_settings| { + config_settings + .into_iter() + .collect::() + }), no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, @@ -353,6 +377,7 @@ pub fn resolver_installer_options( pre, fork_strategy, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -428,6 +453,11 @@ pub fn resolver_installer_options( dependency_metadata: None, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), + config_settings_package: config_settings_package.map(|config_settings| { + config_settings + .into_iter() + .collect::() + }), no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: if no_build_isolation_package.is_empty() { None diff --git a/crates/uv-configuration/src/config_settings.rs b/crates/uv-configuration/src/config_settings.rs index cd1d67196..c6238deb2 100644 --- a/crates/uv-configuration/src/config_settings.rs +++ b/crates/uv-configuration/src/config_settings.rs @@ -3,6 +3,7 @@ use std::{ str::FromStr, }; use uv_cache_key::CacheKeyHasher; +use uv_normalize::PackageName; #[derive(Debug, Clone)] pub 
struct ConfigSettingEntry { @@ -28,6 +29,32 @@ impl FromStr for ConfigSettingEntry { } } +#[derive(Debug, Clone)] +pub struct ConfigSettingPackageEntry { + /// The package name to apply the setting to. + package: PackageName, + /// The config setting entry. + setting: ConfigSettingEntry, +} + +impl FromStr for ConfigSettingPackageEntry { + type Err = String; + + fn from_str(s: &str) -> Result { + let Some((package_str, config_str)) = s.split_once(':') else { + return Err(format!( + "Invalid config setting: {s} (expected `PACKAGE:KEY=VALUE`)" + )); + }; + + let package = PackageName::from_str(package_str.trim()) + .map_err(|e| format!("Invalid package name: {e}"))?; + let setting = ConfigSettingEntry::from_str(config_str)?; + + Ok(Self { package, setting }) + } +} + #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema), schemars(untagged))] enum ConfigSettingValue { @@ -212,6 +239,111 @@ impl<'de> serde::Deserialize<'de> for ConfigSettings { } } +/// Settings to pass to PEP 517 build backends on a per-package basis. +#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct PackageConfigSettings(BTreeMap); + +impl FromIterator for PackageConfigSettings { + fn from_iter>(iter: T) -> Self { + let mut package_configs: BTreeMap> = BTreeMap::new(); + + for entry in iter { + package_configs + .entry(entry.package) + .or_default() + .push(entry.setting); + } + + let configs = package_configs + .into_iter() + .map(|(package, entries)| (package, entries.into_iter().collect())) + .collect(); + + Self(configs) + } +} + +impl PackageConfigSettings { + /// Returns the config settings for a specific package, if any. + pub fn get(&self, package: &PackageName) -> Option<&ConfigSettings> { + self.0.get(package) + } + + /// Returns `true` if there are no package-specific settings. 
+ pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Merge two sets of package config settings, with the values in `self` taking precedence. + #[must_use] + pub fn merge(mut self, other: PackageConfigSettings) -> PackageConfigSettings { + for (package, settings) in other.0 { + match self.0.entry(package) { + Entry::Vacant(vacant) => { + vacant.insert(settings); + } + Entry::Occupied(mut occupied) => { + let merged = occupied.get().clone().merge(settings); + occupied.insert(merged); + } + } + } + self + } +} + +impl uv_cache_key::CacheKey for PackageConfigSettings { + fn cache_key(&self, state: &mut CacheKeyHasher) { + for (package, settings) in &self.0 { + package.to_string().cache_key(state); + settings.cache_key(state); + } + } +} + +impl serde::Serialize for PackageConfigSettings { + fn serialize(&self, serializer: S) -> Result { + use serde::ser::SerializeMap; + + let mut map = serializer.serialize_map(Some(self.0.len()))?; + for (key, value) in &self.0 { + map.serialize_entry(&key.to_string(), value)?; + } + map.end() + } +} + +impl<'de> serde::Deserialize<'de> for PackageConfigSettings { + fn deserialize>(deserializer: D) -> Result { + struct Visitor; + + impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = PackageConfigSettings; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a map from package name to config settings") + } + + fn visit_map>( + self, + mut map: A, + ) -> Result { + let mut config = BTreeMap::default(); + while let Some((key, value)) = map.next_entry::()? 
{ + let package = PackageName::from_str(&key).map_err(|e| { + serde::de::Error::custom(format!("Invalid package name: {e}")) + })?; + config.insert(package, value); + } + Ok(PackageConfigSettings(config)) + } + } + + deserializer.deserialize_map(Visitor) + } +} + #[cfg(test)] mod tests { use super::*; @@ -291,4 +423,56 @@ mod tests { ); assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#); } + + #[test] + fn parse_config_setting_package_entry() { + // Test valid parsing + let entry = ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap(); + assert_eq!(entry.package.as_ref(), "numpy"); + assert_eq!(entry.setting.key, "editable_mode"); + assert_eq!(entry.setting.value, "compat"); + + // Test with package name containing hyphens + let entry = ConfigSettingPackageEntry::from_str("my-package:some_key=value").unwrap(); + assert_eq!(entry.package.as_ref(), "my-package"); + assert_eq!(entry.setting.key, "some_key"); + assert_eq!(entry.setting.value, "value"); + + // Test with spaces around values + let entry = ConfigSettingPackageEntry::from_str(" numpy : key = value ").unwrap(); + assert_eq!(entry.package.as_ref(), "numpy"); + assert_eq!(entry.setting.key, "key"); + assert_eq!(entry.setting.value, "value"); + } + + #[test] + fn collect_config_settings_package() { + let settings: PackageConfigSettings = vec![ + ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap(), + ConfigSettingPackageEntry::from_str("numpy:another_key=value").unwrap(), + ConfigSettingPackageEntry::from_str("scipy:build_option=fast").unwrap(), + ] + .into_iter() + .collect(); + + let numpy_settings = settings + .get(&PackageName::from_str("numpy").unwrap()) + .unwrap(); + assert_eq!( + numpy_settings.0.get("editable_mode"), + Some(&ConfigSettingValue::String("compat".to_string())) + ); + assert_eq!( + numpy_settings.0.get("another_key"), + Some(&ConfigSettingValue::String("value".to_string())) + ); + + let scipy_settings = settings + 
.get(&PackageName::from_str("scipy").unwrap()) + .unwrap(); + assert_eq!( + scipy_settings.0.get("build_option"), + Some(&ConfigSettingValue::String("fast".to_string())) + ); + } } diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index 874e412e5..2e34b583d 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -17,8 +17,8 @@ use uv_build_frontend::{SourceBuild, SourceBuildContext}; use uv_cache::Cache; use uv_client::RegistryClient; use uv_configuration::{ - BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PreviewMode, Reinstall, - SourceStrategy, + BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PackageConfigSettings, + PreviewMode, Reinstall, SourceStrategy, }; use uv_configuration::{BuildOutput, Concurrency}; use uv_distribution::DistributionDatabase; @@ -91,6 +91,7 @@ pub struct BuildDispatch<'a> { link_mode: uv_install_wheel::LinkMode, build_options: &'a BuildOptions, config_settings: &'a ConfigSettings, + config_settings_package: &'a PackageConfigSettings, hasher: &'a HashStrategy, exclude_newer: Option, source_build_context: SourceBuildContext, @@ -113,6 +114,7 @@ impl<'a> BuildDispatch<'a> { shared_state: SharedState, index_strategy: IndexStrategy, config_settings: &'a ConfigSettings, + config_settings_package: &'a PackageConfigSettings, build_isolation: BuildIsolation<'a>, link_mode: uv_install_wheel::LinkMode, build_options: &'a BuildOptions, @@ -134,6 +136,7 @@ impl<'a> BuildDispatch<'a> { dependency_metadata, index_strategy, config_settings, + config_settings_package, build_isolation, link_mode, build_options, @@ -200,6 +203,10 @@ impl BuildContext for BuildDispatch<'_> { self.config_settings } + fn config_settings_package(&self) -> &PackageConfigSettings { + self.config_settings_package + } + fn sources(&self) -> SourceStrategy { self.sources } @@ -295,6 +302,7 @@ impl BuildContext for BuildDispatch<'_> { self.hasher, self.index_locations, 
self.config_settings, + self.config_settings_package, self.cache(), venv, tags, @@ -418,6 +426,17 @@ impl BuildContext for BuildDispatch<'_> { build_stack.insert(dist.distribution_id()); } + // Get package-specific config settings if available; otherwise, use global settings. + let config_settings = if let Some(name) = dist_name { + if let Some(package_settings) = self.config_settings_package.get(name) { + package_settings.clone().merge(self.config_settings.clone()) + } else { + self.config_settings.clone() + } + } else { + self.config_settings.clone() + }; + let builder = SourceBuild::setup( source, subdirectory, @@ -431,7 +450,7 @@ impl BuildContext for BuildDispatch<'_> { self.index_locations, sources, self.workspace_cache(), - self.config_settings.clone(), + config_settings, self.build_isolation, &build_stack, build_kind, diff --git a/crates/uv-distribution/src/index/built_wheel_index.rs b/crates/uv-distribution/src/index/built_wheel_index.rs index 9752e7e4f..90ce5deed 100644 --- a/crates/uv-distribution/src/index/built_wheel_index.rs +++ b/crates/uv-distribution/src/index/built_wheel_index.rs @@ -1,10 +1,12 @@ +use std::borrow::Cow; use uv_cache::{Cache, CacheBucket, CacheShard, WheelCache}; use uv_cache_info::CacheInfo; use uv_cache_key::cache_digest; -use uv_configuration::ConfigSettings; +use uv_configuration::{ConfigSettings, PackageConfigSettings}; use uv_distribution_types::{ DirectUrlSourceDist, DirectorySourceDist, GitSourceDist, Hashed, PathSourceDist, }; +use uv_normalize::PackageName; use uv_platform_tags::Tags; use uv_types::HashStrategy; @@ -18,7 +20,8 @@ pub struct BuiltWheelIndex<'a> { cache: &'a Cache, tags: &'a Tags, hasher: &'a HashStrategy, - build_configuration: &'a ConfigSettings, + config_settings: &'a ConfigSettings, + config_settings_package: &'a PackageConfigSettings, } impl<'a> BuiltWheelIndex<'a> { @@ -27,13 +30,15 @@ impl<'a> BuiltWheelIndex<'a> { cache: &'a Cache, tags: &'a Tags, hasher: &'a HashStrategy, - build_configuration: &'a 
ConfigSettings, + config_settings: &'a ConfigSettings, + config_settings_package: &'a PackageConfigSettings, ) -> Self { Self { cache, tags, hasher, - build_configuration, + config_settings, + config_settings_package, } } @@ -63,10 +68,11 @@ impl<'a> BuiltWheelIndex<'a> { let cache_shard = cache_shard.shard(revision.id()); // If there are build settings, we need to scope to a cache shard. - let cache_shard = if self.build_configuration.is_empty() { + let config_settings = self.config_settings_for(&source_dist.name); + let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(self.build_configuration)) + cache_shard.shard(cache_digest(&config_settings)) }; Ok(self.find(&cache_shard)) @@ -100,10 +106,11 @@ impl<'a> BuiltWheelIndex<'a> { let cache_shard = cache_shard.shard(revision.id()); // If there are build settings, we need to scope to a cache shard. - let cache_shard = if self.build_configuration.is_empty() { + let config_settings = self.config_settings_for(&source_dist.name); + let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(self.build_configuration)) + cache_shard.shard(cache_digest(&config_settings)) }; Ok(self @@ -148,10 +155,11 @@ impl<'a> BuiltWheelIndex<'a> { let cache_shard = cache_shard.shard(revision.id()); // If there are build settings, we need to scope to a cache shard. - let cache_shard = if self.build_configuration.is_empty() { + let config_settings = self.config_settings_for(&source_dist.name); + let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(self.build_configuration)) + cache_shard.shard(cache_digest(&config_settings)) }; Ok(self @@ -174,10 +182,11 @@ impl<'a> BuiltWheelIndex<'a> { ); // If there are build settings, we need to scope to a cache shard. 
- let cache_shard = if self.build_configuration.is_empty() { + let config_settings = self.config_settings_for(&source_dist.name); + let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(self.build_configuration)) + cache_shard.shard(cache_digest(&config_settings)) }; self.find(&cache_shard) @@ -239,4 +248,13 @@ impl<'a> BuiltWheelIndex<'a> { candidate } + + /// Determine the [`ConfigSettings`] for the given package name. + fn config_settings_for(&self, name: &PackageName) -> Cow<'_, ConfigSettings> { + if let Some(package_settings) = self.config_settings_package.get(name) { + Cow::Owned(package_settings.clone().merge(self.config_settings.clone())) + } else { + Cow::Borrowed(self.config_settings) + } + } } diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index 1308e3d77..080a1e52d 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -29,7 +29,7 @@ use uv_cache_key::cache_digest; use uv_client::{ CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient, }; -use uv_configuration::{BuildKind, BuildOutput, SourceStrategy}; +use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy}; use uv_distribution_filename::{SourceDistExtension, WheelFilename}; use uv_distribution_types::{ BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl, @@ -373,6 +373,23 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { Ok(metadata) } + /// Determine the [`ConfigSettings`] for the given package name. 
+ fn config_settings_for(&self, name: Option<&PackageName>) -> Cow<'_, ConfigSettings> { + if let Some(name) = name { + if let Some(package_settings) = self.build_context.config_settings_package().get(name) { + Cow::Owned( + package_settings + .clone() + .merge(self.build_context.config_settings().clone()), + ) + } else { + Cow::Borrowed(self.build_context.config_settings()) + } + } else { + Cow::Borrowed(self.build_context.config_settings()) + } + } + /// Build a source distribution from a remote URL. async fn url<'data>( &self, @@ -407,11 +424,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let source_dist_entry = cache_shard.entry(SOURCE); // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // If the cache contains a compatible wheel, return it. @@ -580,11 +597,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // Otherwise, we either need to build the metadata. @@ -779,11 +796,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let source_entry = cache_shard.entry(SOURCE); // If there are build settings, we need to scope to a cache shard. 
- let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // If the cache contains a compatible wheel, return it. @@ -941,11 +958,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // Otherwise, we need to build a wheel. @@ -1083,11 +1100,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let cache_shard = cache_shard.shard(revision.id()); // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // If the cache contains a compatible wheel, return it. @@ -1271,11 +1288,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // Otherwise, we need to build a wheel. 
@@ -1476,11 +1493,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let _lock = cache_shard.lock().await.map_err(Error::CacheWrite)?; // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // If the cache contains a compatible wheel, return it. @@ -1779,11 +1796,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } // If there are build settings, we need to scope to a cache shard. - let config_settings = self.build_context.config_settings(); + let config_settings = self.config_settings_for(source.name()); let cache_shard = if config_settings.is_empty() { cache_shard } else { - cache_shard.shard(cache_digest(config_settings)) + cache_shard.shard(cache_digest(&config_settings)) }; // Otherwise, we need to build a wheel. diff --git a/crates/uv-installer/src/plan.rs b/crates/uv-installer/src/plan.rs index e030e9b4d..69e10befc 100644 --- a/crates/uv-installer/src/plan.rs +++ b/crates/uv-installer/src/plan.rs @@ -4,7 +4,7 @@ use tracing::{debug, warn}; use uv_cache::{Cache, CacheBucket, WheelCache}; use uv_cache_info::Timestamp; -use uv_configuration::{BuildOptions, ConfigSettings, Reinstall}; +use uv_configuration::{BuildOptions, ConfigSettings, PackageConfigSettings, Reinstall}; use uv_distribution::{ BuiltWheelIndex, HttpArchivePointer, LocalArchivePointer, RegistryWheelIndex, }; @@ -52,6 +52,7 @@ impl<'a> Planner<'a> { hasher: &HashStrategy, index_locations: &IndexLocations, config_settings: &ConfigSettings, + config_settings_package: &PackageConfigSettings, cache: &Cache, venv: &PythonEnvironment, tags: &Tags, @@ -59,7 +60,13 @@ impl<'a> Planner<'a> { // Index all the already-downloaded wheels in the cache. 
let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations, hasher, config_settings); - let built_index = BuiltWheelIndex::new(cache, tags, hasher, config_settings); + let built_index = BuiltWheelIndex::new( + cache, + tags, + hasher, + config_settings, + config_settings_package, + ); let mut cached = vec![]; let mut remote = vec![]; diff --git a/crates/uv-settings/src/combine.rs b/crates/uv-settings/src/combine.rs index 8edbd2a05..738b00ffe 100644 --- a/crates/uv-settings/src/combine.rs +++ b/crates/uv-settings/src/combine.rs @@ -4,8 +4,8 @@ use std::path::PathBuf; use url::Url; use uv_configuration::{ - ConfigSettings, ExportFormat, IndexStrategy, KeyringProviderType, RequiredVersion, - TargetTriple, TrustedPublishing, + ConfigSettings, ExportFormat, IndexStrategy, KeyringProviderType, PackageConfigSettings, + RequiredVersion, TargetTriple, TrustedPublishing, }; use uv_distribution_types::{Index, IndexUrl, PipExtraIndex, PipFindLinks, PipIndex}; use uv_install_wheel::LinkMode; @@ -131,6 +131,17 @@ impl Combine for Option { } } +impl Combine for Option { + /// Combine two maps by merging the map in `self` with the map in `other`, if they're both + /// `Some`. 
+ fn combine(self, other: Option) -> Option { + match (self, other) { + (Some(a), Some(b)) => Some(a.merge(b)), + (a, b) => a.or(b), + } + } +} + impl Combine for serde::de::IgnoredAny { fn combine(self, _other: Self) -> Self { self diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index e057cb40a..9eb765a1e 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -4,8 +4,8 @@ use serde::{Deserialize, Serialize}; use uv_cache_info::CacheKey; use uv_configuration::{ - ConfigSettings, IndexStrategy, KeyringProviderType, PackageNameSpecifier, RequiredVersion, - TargetTriple, TrustedHost, TrustedPublishing, + ConfigSettings, IndexStrategy, KeyringProviderType, PackageConfigSettings, + PackageNameSpecifier, RequiredVersion, TargetTriple, TrustedHost, TrustedPublishing, }; use uv_distribution_types::{ Index, IndexUrl, IndexUrlError, PipExtraIndex, PipFindLinks, PipIndex, StaticMetadata, @@ -361,6 +361,7 @@ pub struct ResolverOptions { pub fork_strategy: Option, pub dependency_metadata: Option>, pub config_settings: Option, + pub config_settings_package: Option, pub exclude_newer: Option, pub link_mode: Option, pub upgrade: Option, @@ -587,6 +588,18 @@ pub struct ResolverInstallerOptions { "# )] pub config_settings: Option, + /// Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages, + /// specified as `KEY=VALUE` pairs. + /// + /// Accepts a map from package names to string key-value pairs. + #[option( + default = "{}", + value_type = "dict", + example = r#" + config-settings-package = { numpy = { editable_mode = "compat" } } + "# + )] + pub config_settings_package: Option, /// Disable isolation when building source distributions. 
/// /// Assumes that build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/) @@ -1333,6 +1346,16 @@ pub struct PipOptions { "# )] pub config_settings: Option, + /// Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages, + /// specified as `KEY=VALUE` pairs. + #[option( + default = "{}", + value_type = "dict", + example = r#" + config-settings-package = { numpy = { editable_mode = "compat" } } + "# + )] + pub config_settings_package: Option, /// The minimum Python version that should be supported by the resolved requirements (e.g., /// `3.8` or `3.8.17`). /// @@ -1651,6 +1674,7 @@ impl From for ResolverOptions { fork_strategy: value.fork_strategy, dependency_metadata: value.dependency_metadata, config_settings: value.config_settings, + config_settings_package: value.config_settings_package, exclude_newer: value.exclude_newer, link_mode: value.link_mode, upgrade: value.upgrade, @@ -1714,6 +1738,7 @@ pub struct ToolOptions { pub fork_strategy: Option, pub dependency_metadata: Option>, pub config_settings: Option, + pub config_settings_package: Option, pub no_build_isolation: Option, pub no_build_isolation_package: Option>, pub exclude_newer: Option, @@ -1741,6 +1766,7 @@ impl From for ToolOptions { fork_strategy: value.fork_strategy, dependency_metadata: value.dependency_metadata, config_settings: value.config_settings, + config_settings_package: value.config_settings_package, no_build_isolation: value.no_build_isolation, no_build_isolation_package: value.no_build_isolation_package, exclude_newer: value.exclude_newer, @@ -1770,6 +1796,7 @@ impl From for ResolverInstallerOptions { fork_strategy: value.fork_strategy, dependency_metadata: value.dependency_metadata, config_settings: value.config_settings, + config_settings_package: value.config_settings_package, no_build_isolation: value.no_build_isolation, no_build_isolation_package: value.no_build_isolation_package, exclude_newer: 
value.exclude_newer, @@ -1822,6 +1849,7 @@ pub struct OptionsWire { fork_strategy: Option, dependency_metadata: Option>, config_settings: Option, + config_settings_package: Option, no_build_isolation: Option, no_build_isolation_package: Option>, exclude_newer: Option, @@ -1911,6 +1939,7 @@ impl From for Options { fork_strategy, dependency_metadata, config_settings, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -1977,6 +2006,7 @@ impl From for Options { fork_strategy, dependency_metadata, config_settings, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, diff --git a/crates/uv-types/src/traits.rs b/crates/uv-types/src/traits.rs index a95367fef..e3f4ee012 100644 --- a/crates/uv-types/src/traits.rs +++ b/crates/uv-types/src/traits.rs @@ -7,7 +7,9 @@ use anyhow::Result; use rustc_hash::FxHashSet; use uv_cache::Cache; -use uv_configuration::{BuildKind, BuildOptions, BuildOutput, ConfigSettings, SourceStrategy}; +use uv_configuration::{ + BuildKind, BuildOptions, BuildOutput, ConfigSettings, PackageConfigSettings, SourceStrategy, +}; use uv_distribution_filename::DistFilename; use uv_distribution_types::{ CachedDist, DependencyMetadata, DistributionId, IndexCapabilities, IndexLocations, @@ -87,6 +89,9 @@ pub trait BuildContext { /// The [`ConfigSettings`] used to build distributions. fn config_settings(&self) -> &ConfigSettings; + /// The [`ConfigSettings`] used to build a specific package. + fn config_settings_package(&self) -> &PackageConfigSettings; + /// Whether to incorporate `tool.uv.sources` when resolving requirements. 
fn sources(&self) -> SourceStrategy; diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs index a830f7aef..b3f9e5c89 100644 --- a/crates/uv/src/commands/build_frontend.rs +++ b/crates/uv/src/commands/build_frontend.rs @@ -16,7 +16,7 @@ use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildKind, BuildOptions, BuildOutput, Concurrency, ConfigSettings, Constraints, DependencyGroupsWithDefaults, HashCheckingMode, IndexStrategy, KeyringProviderType, - PreviewMode, SourceStrategy, + PackageConfigSettings, PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_filename::{ @@ -197,6 +197,7 @@ async fn build_impl( fork_strategy: _, dependency_metadata, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -357,6 +358,7 @@ async fn build_impl( dependency_metadata, *link_mode, config_setting, + config_settings_package, preview, ); async { @@ -434,6 +436,7 @@ async fn build_package( dependency_metadata: &DependencyMetadata, link_mode: LinkMode, config_setting: &ConfigSettings, + config_settings_package: &PackageConfigSettings, preview: PreviewMode, ) -> Result, Error> { let output_dir = if let Some(output_dir) = output_dir { @@ -568,6 +571,7 @@ async fn build_package( state.clone(), index_strategy, config_setting, + config_settings_package, build_isolation, link_mode, build_options, diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index c40716763..a5116327b 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -14,7 +14,8 @@ use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, Constraints, ExportFormat, ExtrasSpecification, - IndexStrategy, NoBinary, NoBuild, PreviewMode, Reinstall, 
SourceStrategy, Upgrade, + IndexStrategy, NoBinary, NoBuild, PackageConfigSettings, PreviewMode, Reinstall, + SourceStrategy, Upgrade, }; use uv_configuration::{KeyringProviderType, TargetTriple}; use uv_dispatch::{BuildDispatch, SharedState}; @@ -90,6 +91,7 @@ pub(crate) async fn pip_compile( keyring_provider: KeyringProviderType, network_settings: &NetworkSettings, config_settings: ConfigSettings, + config_settings_package: PackageConfigSettings, no_build_isolation: bool, no_build_isolation_package: Vec, build_options: BuildOptions, @@ -477,6 +479,7 @@ pub(crate) async fn pip_compile( state, index_strategy, &config_settings, + &config_settings_package, build_isolation, link_mode, &build_options, diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index bbfe99c50..79e18bd98 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -11,7 +11,8 @@ use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, Constraints, DryRun, ExtrasSpecification, - HashCheckingMode, IndexStrategy, PreviewMode, Reinstall, SourceStrategy, Upgrade, + HashCheckingMode, IndexStrategy, PackageConfigSettings, PreviewMode, Reinstall, SourceStrategy, + Upgrade, }; use uv_configuration::{KeyringProviderType, TargetTriple}; use uv_dispatch::{BuildDispatch, SharedState}; @@ -75,6 +76,7 @@ pub(crate) async fn pip_install( hash_checking: Option, installer_metadata: bool, config_settings: &ConfigSettings, + config_settings_package: &PackageConfigSettings, no_build_isolation: bool, no_build_isolation_package: Vec, build_options: BuildOptions, @@ -422,6 +424,7 @@ pub(crate) async fn pip_install( state.clone(), index_strategy, config_settings, + config_settings_package, build_isolation, link_mode, &build_options, @@ -513,6 +516,7 @@ pub(crate) async fn pip_install( compile, &index_locations, config_settings, + 
config_settings_package, &hasher, &tags, &client, diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 55ab2aa1b..117321c14 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -13,7 +13,7 @@ use uv_cache::Cache; use uv_client::{BaseClientBuilder, RegistryClient}; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, Constraints, DependencyGroups, DryRun, - ExtrasSpecification, Overrides, Reinstall, Upgrade, + ExtrasSpecification, Overrides, PackageConfigSettings, Reinstall, Upgrade, }; use uv_dispatch::BuildDispatch; use uv_distribution::{DistributionDatabase, SourcedDependencyGroups}; @@ -445,6 +445,7 @@ pub(crate) async fn install( compile: bool, index_urls: &IndexLocations, config_settings: &ConfigSettings, + config_settings_package: &PackageConfigSettings, hasher: &HashStrategy, tags: &Tags, client: &RegistryClient, @@ -470,6 +471,7 @@ pub(crate) async fn install( hasher, index_urls, config_settings, + config_settings_package, cache, venv, tags, diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 6858ddad0..61999825e 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -9,7 +9,8 @@ use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, Constraints, DryRun, ExtrasSpecification, - HashCheckingMode, IndexStrategy, PreviewMode, Reinstall, SourceStrategy, Upgrade, + HashCheckingMode, IndexStrategy, PackageConfigSettings, PreviewMode, Reinstall, SourceStrategy, + Upgrade, }; use uv_configuration::{KeyringProviderType, TargetTriple}; use uv_dispatch::{BuildDispatch, SharedState}; @@ -60,6 +61,7 @@ pub(crate) async fn pip_sync( allow_empty_requirements: bool, installer_metadata: bool, config_settings: &ConfigSettings, + config_settings_package: &PackageConfigSettings, 
no_build_isolation: bool, no_build_isolation_package: Vec, build_options: BuildOptions, @@ -355,6 +357,7 @@ pub(crate) async fn pip_sync( state.clone(), index_strategy, config_settings, + config_settings_package, build_isolation, link_mode, &build_options, @@ -448,6 +451,7 @@ pub(crate) async fn pip_sync( compile, &index_locations, config_settings, + config_settings_package, &hasher, &tags, &client, diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index 28cc2dcd5..12535f859 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -436,6 +436,7 @@ pub(crate) async fn add( state.clone().into_inner(), settings.resolver.index_strategy, &settings.resolver.config_setting, + &settings.resolver.config_settings_package, build_isolation, settings.resolver.link_mode, &settings.resolver.build_options, diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index e23bd97c2..706c86593 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -432,6 +432,7 @@ async fn do_lock( fork_strategy, dependency_metadata, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -674,6 +675,7 @@ async fn do_lock( state.fork().into_inner(), *index_strategy, config_setting, + config_settings_package, build_isolation, *link_mode, build_options, diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index cce02a70b..becd2a26e 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -1674,6 +1674,7 @@ pub(crate) async fn resolve_names( ResolverSettings { build_options, config_setting, + config_settings_package, dependency_metadata, exclude_newer, fork_strategy: _, @@ -1742,6 +1743,7 @@ pub(crate) async fn resolve_names( state.clone(), *index_strategy, config_setting, + config_settings_package, build_isolation, 
*link_mode, build_options, @@ -1832,6 +1834,7 @@ pub(crate) async fn resolve_environment( fork_strategy, dependency_metadata, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -1948,6 +1951,7 @@ pub(crate) async fn resolve_environment( state.clone().into_inner(), *index_strategy, config_setting, + config_settings_package, build_isolation, *link_mode, build_options, @@ -2013,6 +2017,7 @@ pub(crate) async fn sync_environment( keyring_provider, dependency_metadata, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -2084,6 +2089,7 @@ pub(crate) async fn sync_environment( state.clone().into_inner(), index_strategy, config_setting, + config_settings_package, build_isolation, link_mode, build_options, @@ -2106,6 +2112,7 @@ pub(crate) async fn sync_environment( compile_bytecode, index_locations, config_setting, + config_settings_package, &hasher, tags, &client, @@ -2169,6 +2176,7 @@ pub(crate) async fn update_environment( ResolverSettings { build_options, config_setting, + config_settings_package, dependency_metadata, exclude_newer, fork_strategy, @@ -2305,6 +2313,7 @@ pub(crate) async fn update_environment( state.clone(), *index_strategy, config_setting, + config_settings_package, build_isolation, *link_mode, build_options, @@ -2362,6 +2371,7 @@ pub(crate) async fn update_environment( *compile_bytecode, index_locations, config_setting, + config_settings_package, &hasher, tags, &client, diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 8d2dd9629..adf3b61f2 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -573,6 +573,7 @@ pub(super) async fn do_sync( keyring_provider, dependency_metadata, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -709,6 +710,7 @@ pub(super) async fn do_sync( 
state.clone().into_inner(), index_strategy, config_setting, + config_settings_package, build_isolation, link_mode, build_options, @@ -733,6 +735,7 @@ pub(super) async fn do_sync( compile_bytecode, index_locations, config_setting, + config_settings_package, &hasher, &tags, &client, diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index cd1339d3e..756820dc7 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -200,6 +200,7 @@ pub(crate) async fn tree( fork_strategy: _, dependency_metadata: _, config_setting: _, + config_settings_package: _, no_build_isolation: _, no_build_isolation_package: _, exclude_newer: _, diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index 92eb1ead7..9d3b87fe1 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -12,7 +12,7 @@ use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, Constraints, DependencyGroups, IndexStrategy, - KeyringProviderType, NoBinary, NoBuild, PreviewMode, SourceStrategy, + KeyringProviderType, NoBinary, NoBuild, PackageConfigSettings, PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_types::Requirement; @@ -269,6 +269,7 @@ pub(crate) async fn venv( let build_constraints = Constraints::default(); let build_hasher = HashStrategy::default(); let config_settings = ConfigSettings::default(); + let config_settings_package = PackageConfigSettings::default(); let sources = SourceStrategy::Disabled; // Do not allow builds @@ -286,6 +287,7 @@ pub(crate) async fn venv( state.clone(), index_strategy, &config_settings, + &config_settings_package, BuildIsolation::Isolated, link_mode, &build_options, diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 9c9b41065..0f6c9465f 100644 --- a/crates/uv/src/lib.rs +++ 
b/crates/uv/src/lib.rs @@ -524,6 +524,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.keyring_provider, &globals.network_settings, args.settings.config_setting, + args.settings.config_settings_package, args.settings.no_build_isolation, args.settings.no_build_isolation_package, args.settings.build_options, @@ -594,6 +595,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.allow_empty_requirements, globals.installer_metadata, &args.settings.config_setting, + &args.settings.config_settings_package, args.settings.no_build_isolation, args.settings.no_build_isolation_package, args.settings.build_options, @@ -745,6 +747,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.hash_checking, globals.installer_metadata, &args.settings.config_setting, + &args.settings.config_settings_package, args.settings.no_build_isolation, args.settings.no_build_isolation_package, args.settings.build_options, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index 1ebeecba8..aa105cf97 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -23,9 +23,9 @@ use uv_client::Connectivity; use uv_configuration::{ BuildOptions, Concurrency, ConfigSettings, DependencyGroups, DryRun, EditableMode, ExportFormat, ExtrasSpecification, HashCheckingMode, IndexStrategy, InstallOptions, - KeyringProviderType, NoBinary, NoBuild, PreviewMode, ProjectBuildBackend, Reinstall, - RequiredVersion, SourceStrategy, TargetTriple, TrustedHost, TrustedPublishing, Upgrade, - VersionControlSystem, + KeyringProviderType, NoBinary, NoBuild, PackageConfigSettings, PreviewMode, + ProjectBuildBackend, Reinstall, RequiredVersion, SourceStrategy, TargetTriple, TrustedHost, + TrustedPublishing, Upgrade, VersionControlSystem, }; use uv_distribution_types::{DependencyMetadata, Index, IndexLocations, IndexUrl, Requirement}; use uv_install_wheel::LinkMode; @@ -712,6 +712,7 @@ impl ToolUpgradeSettings { pre, fork_strategy, config_setting, + config_setting_package: 
config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -746,6 +747,7 @@ impl ToolUpgradeSettings { pre, fork_strategy, config_setting, + config_settings_package, no_build_isolation, no_build_isolation_package, build_isolation, @@ -2694,6 +2696,7 @@ pub(crate) struct InstallerSettingsRef<'a> { pub(crate) keyring_provider: KeyringProviderType, pub(crate) dependency_metadata: &'a DependencyMetadata, pub(crate) config_setting: &'a ConfigSettings, + pub(crate) config_settings_package: &'a PackageConfigSettings, pub(crate) no_build_isolation: bool, pub(crate) no_build_isolation_package: &'a [PackageName], pub(crate) exclude_newer: Option, @@ -2712,6 +2715,7 @@ pub(crate) struct InstallerSettingsRef<'a> { pub(crate) struct ResolverSettings { pub(crate) build_options: BuildOptions, pub(crate) config_setting: ConfigSettings, + pub(crate) config_settings_package: PackageConfigSettings, pub(crate) dependency_metadata: DependencyMetadata, pub(crate) exclude_newer: Option, pub(crate) fork_strategy: ForkStrategy, @@ -2770,6 +2774,7 @@ impl From for ResolverSettings { index_strategy: value.index_strategy.unwrap_or_default(), keyring_provider: value.keyring_provider.unwrap_or_default(), config_setting: value.config_settings.unwrap_or_default(), + config_settings_package: value.config_settings_package.unwrap_or_default(), no_build_isolation: value.no_build_isolation.unwrap_or_default(), no_build_isolation_package: value.no_build_isolation_package.unwrap_or_default(), exclude_newer: value.exclude_newer, @@ -2849,6 +2854,7 @@ impl From for ResolverInstallerSettings { NoBuild::from_args(value.no_build, value.no_build_package.unwrap_or_default()), ), config_setting: value.config_settings.unwrap_or_default(), + config_settings_package: value.config_settings_package.unwrap_or_default(), dependency_metadata: DependencyMetadata::from_entries( value.dependency_metadata.into_iter().flatten(), ), @@ -2918,6 +2924,7 @@ pub(crate) struct PipSettings { 
pub(crate) custom_compile_command: Option, pub(crate) generate_hashes: bool, pub(crate) config_setting: ConfigSettings, + pub(crate) config_settings_package: PackageConfigSettings, pub(crate) python_version: Option, pub(crate) python_platform: Option, pub(crate) universal: bool, @@ -2987,6 +2994,7 @@ impl PipSettings { custom_compile_command, generate_hashes, config_settings, + config_settings_package, python_version, python_platform, universal, @@ -3022,6 +3030,7 @@ impl PipSettings { fork_strategy: top_level_fork_strategy, dependency_metadata: top_level_dependency_metadata, config_settings: top_level_config_settings, + config_settings_package: top_level_config_settings_package, no_build_isolation: top_level_no_build_isolation, no_build_isolation_package: top_level_no_build_isolation_package, exclude_newer: top_level_exclude_newer, @@ -3054,6 +3063,8 @@ impl PipSettings { let fork_strategy = fork_strategy.combine(top_level_fork_strategy); let dependency_metadata = dependency_metadata.combine(top_level_dependency_metadata); let config_settings = config_settings.combine(top_level_config_settings); + let config_settings_package = + config_settings_package.combine(top_level_config_settings_package); let no_build_isolation = no_build_isolation.combine(top_level_no_build_isolation); let no_build_isolation_package = no_build_isolation_package.combine(top_level_no_build_isolation_package); @@ -3156,6 +3167,10 @@ impl PipSettings { .config_settings .combine(config_settings) .unwrap_or_default(), + config_settings_package: args + .config_settings_package + .combine(config_settings_package) + .unwrap_or_default(), torch_backend: args.torch_backend.combine(torch_backend), python_version: args.python_version.combine(python_version), python_platform: args.python_platform.combine(python_platform), @@ -3249,6 +3264,7 @@ impl<'a> From<&'a ResolverInstallerSettings> for InstallerSettingsRef<'a> { keyring_provider: settings.resolver.keyring_provider, dependency_metadata: 
&settings.resolver.dependency_metadata, config_setting: &settings.resolver.config_setting, + config_settings_package: &settings.resolver.config_settings_package, no_build_isolation: settings.resolver.no_build_isolation, no_build_isolation_package: &settings.resolver.no_build_isolation_package, exclude_newer: settings.resolver.exclude_newer, diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index 2a7b0f404..a977ac813 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -4054,13 +4054,13 @@ fn config_settings_path() -> Result<()> { "### ); - // When installed without `--editable_mode=compat`, the `finder.py` file should be present. + // When installed without `editable_mode=compat`, the `finder.py` file should be present. let finder = context .site_packages() .join("__editable___setuptools_editable_0_1_0_finder.py"); assert!(finder.exists()); - // Reinstalling with `--editable_mode=compat` should be a no-op; changes in build configuration + // Reinstalling with `editable_mode=compat` should be a no-op; changes in build configuration // don't invalidate the environment. uv_snapshot!(context.filters(), context.pip_install() .arg("-r") @@ -4089,7 +4089,7 @@ fn config_settings_path() -> Result<()> { - setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) "###); - // Install the editable package with `--editable_mode=compat`. We should ignore the cached + // Install the editable package with `editable_mode=compat`. We should ignore the cached // build configuration and rebuild. uv_snapshot!(context.filters(), context.pip_install() .arg("-r") @@ -4109,7 +4109,7 @@ fn config_settings_path() -> Result<()> { "### ); - // When installed without `--editable_mode=compat`, the `finder.py` file should _not_ be present. + // When installed without `editable_mode=compat`, the `finder.py` file should _not_ be present. 
let finder = context .site_packages() .join("__editable___setuptools_editable_0_1_0_finder.py"); @@ -11739,3 +11739,114 @@ fn install_python_preference() { Audited 1 package in [TIME] "); } + +#[test] +fn config_settings_package() -> Result<()> { + let context = TestContext::new("3.12"); + + let requirements_txt = context.temp_dir.child("requirements.txt"); + requirements_txt.write_str(&format!( + "-e {}", + context + .workspace_root + .join("scripts/packages/setuptools_editable") + .display() + ))?; + + // Install the editable package. + uv_snapshot!(context.filters(), context.pip_install() + .arg("-r") + .arg("requirements.txt"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Installed 2 packages in [TIME] + + iniconfig==2.0.0 + + setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) + "### + ); + + // When installed without `editable_mode=compat`, the `finder.py` file should be present. + let finder = context + .site_packages() + .join("__editable___setuptools_editable_0_1_0_finder.py"); + assert!(finder.exists()); + + // Uninstall the package. + uv_snapshot!(context.filters(), context.pip_uninstall() + .arg("setuptools-editable"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Uninstalled 1 package in [TIME] + - setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) + "###); + + // Install the editable package with `editable_mode=compat`, scoped to the package. 
+ uv_snapshot!(context.filters(), context.pip_install() + .arg("-r") + .arg("requirements.txt") + .arg("--config-settings-package") + .arg("setuptools-editable:editable_mode=compat"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) + " + ); + + // When installed with `editable_mode=compat`, the `finder.py` file should _not_ be present. + let finder = context + .site_packages() + .join("__editable___setuptools_editable_0_1_0_finder.py"); + assert!(!finder.exists()); + + // Uninstall the package. + uv_snapshot!(context.filters(), context.pip_uninstall() + .arg("setuptools-editable"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Uninstalled 1 package in [TIME] + - setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) + "###); + + // Install the editable package with `editable_mode=compat`, scoped to a different package. + uv_snapshot!(context.filters(), context.pip_install() + .arg("-r") + .arg("requirements.txt") + .arg("--config-settings-package") + .arg("setuptools:editable_mode=compat") + , @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Installed 1 package in [TIME] + + setuptools-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/setuptools_editable) + " + ); + + // When installed without `editable_mode=compat`, the `finder.py` file should be present.
+ let finder = context + .site_packages() + .join("__editable___setuptools_editable_0_1_0_finder.py"); + assert!(finder.exists()); + + Ok(()) +} diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index 2637af8ac..500e78965 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -203,6 +203,9 @@ fn resolve_uv_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -385,6 +388,9 @@ fn resolve_uv_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -568,6 +574,9 @@ fn resolve_uv_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -783,6 +792,9 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -933,6 +945,9 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -1127,6 +1142,9 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: Some( X8664UnknownLinuxGnu, @@ -1369,6 +1387,9 @@ fn resolve_index_url() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -1621,6 +1642,9 @@ 
fn resolve_index_url() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -1828,6 +1852,9 @@ fn resolve_find_links() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2000,6 +2027,9 @@ fn resolve_top_level() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2232,6 +2262,9 @@ fn resolve_top_level() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2447,6 +2480,9 @@ fn resolve_top_level() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2618,6 +2654,9 @@ fn resolve_user_configuration() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2773,6 +2812,9 @@ fn resolve_user_configuration() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -2928,6 +2970,9 @@ fn resolve_user_configuration() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -3085,6 +3130,9 @@ fn resolve_user_configuration() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + 
config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -3208,6 +3256,7 @@ fn resolve_tool() -> anyhow::Result<()> { fork_strategy: None, dependency_metadata: None, config_settings: None, + config_settings_package: None, no_build_isolation: None, no_build_isolation_package: None, exclude_newer: None, @@ -3234,6 +3283,9 @@ fn resolve_tool() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), dependency_metadata: DependencyMetadata( {}, ), @@ -3426,6 +3478,9 @@ fn resolve_poetry_toml() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -3643,6 +3698,9 @@ fn resolve_both() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -3950,6 +4008,9 @@ fn resolve_config_file() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -4004,7 +4065,7 @@ fn resolve_config_file() -> anyhow::Result<()> { | 1 | [project] | ^^^^^^^ - unknown field `project`, expected one of `required-version`, `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `fork-strategy`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, 
`reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `python-install-mirror`, `pypy-install-mirror`, `python-downloads-json-url`, `publish-url`, `trusted-publishing`, `check-url`, `add-bounds`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `build-constraint-dependencies`, `environments`, `required-environments`, `conflicts`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dependency-groups`, `dev-dependencies`, `build-backend` + unknown field `project`, expected one of `required-version`, `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `fork-strategy`, `dependency-metadata`, `config-settings`, `config-settings-package`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `python-install-mirror`, `pypy-install-mirror`, `python-downloads-json-url`, `publish-url`, `trusted-publishing`, `check-url`, `add-bounds`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `build-constraint-dependencies`, `environments`, `required-environments`, `conflicts`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dependency-groups`, `dev-dependencies`, `build-backend` " ); @@ -4199,6 +4260,9 @@ fn resolve_skip_empty() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -4357,6 +4421,9 @@ fn resolve_skip_empty() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), 
+ config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -4534,6 +4601,9 @@ fn allow_insecure_host() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -4772,6 +4842,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -4989,6 +5062,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -5212,6 +5288,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -5430,6 +5509,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -5655,6 +5737,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -5873,6 +5958,9 @@ fn index_priority() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6035,6 +6123,9 @@ fn verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6183,6 +6274,9 @@ fn 
verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6329,6 +6423,9 @@ fn verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6477,6 +6574,9 @@ fn verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6623,6 +6723,9 @@ fn verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, @@ -6770,6 +6873,9 @@ fn verify_hashes() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: false, diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 5a8d79447..16c4d673a 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -11126,3 +11126,146 @@ fn sync_python_preference() -> Result<()> { Ok(()) } + +#[test] +fn sync_config_settings_package() -> Result<()> { + let context = TestContext::new("3.12").with_exclude_newer("2025-07-25T00:00:00Z"); + + // Create a child project that uses `setuptools`. 
+ let dependency = context.temp_dir.child("dependency"); + dependency.child("pyproject.toml").write_str( + r#" + [project] + name = "dependency" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + dependency + .child("dependency") + .child("__init__.py") + .touch()?; + + // Install the `dependency` without `editable_mode=compat`. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["dependency"] + + [tool.uv.sources] + dependency = { path = "dependency", editable = true } + "#, + )?; + + // Lock the project + context.lock().assert().success(); + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dependency==0.1.0 (from file://[TEMP_DIR]/dependency) + "); + + // When installed without `editable_mode=compat`, the `finder.py` file should be present. + let finder = context + .site_packages() + .join("__editable___dependency_0_1_0_finder.py"); + assert!(finder.exists()); + + // Remove the virtual environment. + fs_err::remove_dir_all(&context.venv)?; + + // Install the `dependency` with `editable_mode=compat` scoped to the package. 
+ let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["dependency"] + + [tool.uv.sources] + dependency = { path = "dependency", editable = true } + + [tool.uv.config-settings-package] + dependency = { editable_mode = "compat" } + "#, + )?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + dependency==0.1.0 (from file://[TEMP_DIR]/dependency) + "); + + // When installed with `editable_mode=compat`, the `finder.py` file should _not_ be present. + let finder = context + .site_packages() + .join("__editable___dependency_0_1_0_finder.py"); + assert!(!finder.exists()); + + // Remove the virtual environment. + fs_err::remove_dir_all(&context.venv)?; + + // Install the `dependency` with `editable_mode=compat` scoped to another package. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["dependency"] + + [tool.uv.sources] + dependency = { path = "dependency", editable = true } + + [tool.uv.config-settings-package] + setuptools = { editable_mode = "compat" } + "#, + )?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Resolved 2 packages in [TIME] + Installed 1 package in [TIME] + + dependency==0.1.0 (from file://[TEMP_DIR]/dependency) + "); + + // When installed without `editable_mode=compat`, the `finder.py` file should be present. 
+ let finder = context + .site_packages() + .join("__editable___dependency_0_1_0_finder.py"); + assert!(finder.exists()); + + Ok(()) +} diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 4fc832cdb..2ca95dce0 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -84,6 +84,7 @@ uv run [OPTIONS] [COMMAND]

      May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -442,6 +443,7 @@ uv add [OPTIONS] >

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. The constraints will not be added to the project's pyproject.toml file, but will be respected during dependency resolution.

    This is equivalent to pip's --constraint option.

    @@ -639,6 +641,7 @@ uv remove [OPTIONS] ...

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -817,6 +820,7 @@ uv version [OPTIONS] [VALUE]

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -1001,6 +1005,7 @@ uv sync [OPTIONS]

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -1248,6 +1253,7 @@ uv lock [OPTIONS]
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -1411,6 +1417,7 @@ uv export [OPTIONS]
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -1605,6 +1612,7 @@ uv tree [OPTIONS]
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -1863,6 +1871,7 @@ uv tool run [OPTIONS] [COMMAND]

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. However, including a package in a constraints file will not trigger the installation of that package.

    This is equivalent to pip's --constraint option.

    @@ -2035,6 +2044,7 @@ uv tool install [OPTIONS]

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. However, including a package in a constraints file will not trigger the installation of that package.

    This is equivalent to pip's --constraint option.

    @@ -2202,6 +2212,7 @@ uv tool upgrade [OPTIONS] ...

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    @@ -3345,6 +3356,7 @@ uv pip compile [OPTIONS] >
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. However, including a package in a constraints file will not trigger the installation of that package.

    This is equivalent to pip's --constraint option.

    @@ -3650,6 +3662,7 @@ uv pip sync [OPTIONS] ...

    May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. However, including a package in a constraints file will not trigger the installation of that package.

    This is equivalent to pip's --constraint option.

    @@ -3900,6 +3913,7 @@ uv pip install [OPTIONS] |--editable May also be set with the UV_COMPILE_BYTECODE environment variable.

    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --constraints, --constraint, -c constraints

    Constrain versions using the given requirements files.

    Constraints files are requirements.txt-like files that only control the version of a requirement that's installed. However, including a package in a constraints file will not trigger the installation of that package.

    This is equivalent to pip's --constraint option.

    @@ -4845,6 +4859,7 @@ uv build [OPTIONS] [SRC]
    --config-file config-file

    The path to a uv.toml file to use for configuration.

    While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

    May also be set with the UV_CONFIG_FILE environment variable.

    --config-setting, --config-settings, -C config-setting

    Settings to pass to the PEP 517 build backend, specified as KEY=VALUE pairs

    +
    --config-settings-package, --config-settings-package config-settings-package

    Settings to pass to the PEP 517 build backend for a specific package, specified as PACKAGE:KEY=VALUE pairs

    --default-index default-index

    The URL of the default package index (by default: https://pypi.org/simple).

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    The index given by this flag is given lower priority than all other indexes specified via the --index flag.

    diff --git a/docs/reference/settings.md b/docs/reference/settings.md index bdee1e4a1..55d3f8ae4 100644 --- a/docs/reference/settings.md +++ b/docs/reference/settings.md @@ -1006,6 +1006,33 @@ specified as `KEY=VALUE` pairs. --- +### [`config-settings-package`](#config-settings-package) {: #config-settings-package } + +Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages, +specified as `KEY=VALUE` pairs. + +Accepts a map from package names to string key-value pairs. + +**Default value**: `{}` + +**Type**: `dict` + +**Example usage**: + +=== "pyproject.toml" + + ```toml + [tool.uv] + config-settings-package = { numpy = { editable_mode = "compat" } } + ``` +=== "uv.toml" + + ```toml + config-settings-package = { numpy = { editable_mode = "compat" } } + ``` + +--- + ### [`dependency-metadata`](#dependency-metadata) {: #dependency-metadata } Pre-defined static metadata for dependencies of the project (direct or transitive). When @@ -2244,6 +2271,33 @@ specified as `KEY=VALUE` pairs. --- +#### [`config-settings-package`](#pip_config-settings-package) {: #pip_config-settings-package } + + +Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages, +specified as `KEY=VALUE` pairs. 
+ +**Default value**: `{}` + +**Type**: `dict` + +**Example usage**: + +=== "pyproject.toml" + + ```toml + [tool.uv.pip] + config-settings-package = { numpy = { editable_mode = "compat" } } + ``` +=== "uv.toml" + + ```toml + [pip] + config-settings-package = { numpy = { editable_mode = "compat" } } + ``` + +--- + #### [`custom-compile-command`](#pip_custom-compile-command) {: #pip_custom-compile-command } diff --git a/uv.schema.json b/uv.schema.json index ba89f65f4..22b30cd06 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -119,6 +119,17 @@ } ] }, + "config-settings-package": { + "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages,\nspecified as `KEY=VALUE` pairs.\n\nAccepts a map from package names to string key-value pairs.", + "anyOf": [ + { + "$ref": "#/definitions/PackageConfigSettings" + }, + { + "type": "null" + } + ] + }, "conflicts": { "description": "A list of sets of conflicting groups or extras.", "anyOf": [ @@ -1104,6 +1115,13 @@ } ] }, + "PackageConfigSettings": { + "description": "Settings to pass to PEP 517 build backends on a per-package basis.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ConfigSettings" + } + }, "PackageName": { "description": "The normalized name of a package.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.\nFor example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: ", "type": "string" @@ -1185,6 +1203,17 @@ } ] }, + "config-settings-package": { + "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend for specific packages,\nspecified as `KEY=VALUE` pairs.", + "anyOf": [ + { + "$ref": "#/definitions/PackageConfigSettings" + }, + { + "type": "null" + } + ] + }, "custom-compile-command": { "description": "The header comment to include at the top of the output file generated by `uv pip compile`.\n\nUsed to reflect custom 
build scripts and commands that wrap `uv pip compile`.", "type": [ From bce2ea480d8efc3fb4640b759a1894d0eb760e4e Mon Sep 17 00:00:00 2001 From: konsti Date: Fri, 18 Jul 2025 12:50:04 +0200 Subject: [PATCH 083/130] Escape requires version for built_by_uv test (#14706) This keeps the hash stable across uv releases. Fixes #14695 --- Cargo.lock | 1 + crates/uv-build-backend/Cargo.toml | 1 + crates/uv-build-backend/src/lib.rs | 29 ++++++++++++++++++++--------- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78429b08f..77dfad413 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4817,6 +4817,7 @@ dependencies = [ "indoc", "insta", "itertools 0.14.0", + "regex", "rustc-hash", "schemars", "serde", diff --git a/crates/uv-build-backend/Cargo.toml b/crates/uv-build-backend/Cargo.toml index 7714423d4..677cbc222 100644 --- a/crates/uv-build-backend/Cargo.toml +++ b/crates/uv-build-backend/Cargo.toml @@ -57,4 +57,5 @@ schemars = ["dep:schemars", "uv-pypi-types/schemars"] [dev-dependencies] indoc = { workspace = true } insta = { version = "1.40.0", features = ["filters"] } +regex = { workspace = true } tempfile = { workspace = true } diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs index 8add8dda3..5e0efd6d5 100644 --- a/crates/uv-build-backend/src/lib.rs +++ b/crates/uv-build-backend/src/lib.rs @@ -355,6 +355,7 @@ mod tests { use indoc::indoc; use insta::assert_snapshot; use itertools::Itertools; + use regex::Regex; use sha2::Digest; use std::io::{BufReader, Read}; use std::iter; @@ -362,6 +363,8 @@ mod tests { use uv_distribution_filename::{SourceDistFilename, WheelFilename}; use uv_fs::{copy_dir_all, relative_to}; + const MOCK_UV_VERSION: &str = "1.0.0+test"; + fn format_err(err: &Error) -> String { let context = iter::successors(std::error::Error::source(&err), |&err| err.source()) .map(|err| format!(" Caused by: {err}")) @@ -388,19 +391,19 @@ mod tests { fn build(source_root: &Path, dist: &Path) -> 
Result { // Build a direct wheel, capture all its properties to compare it with the indirect wheel // latest and remove it since it has the same filename as the indirect wheel. - let (_name, direct_wheel_list_files) = list_wheel(source_root, "1.0.0+test")?; - let direct_wheel_filename = build_wheel(source_root, dist, None, "1.0.0+test")?; + let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?; + let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?; let direct_wheel_path = dist.join(direct_wheel_filename.to_string()); let direct_wheel_contents = wheel_contents(&direct_wheel_path); let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?); fs_err::remove_file(&direct_wheel_path)?; // Build a source distribution. - let (_name, source_dist_list_files) = list_source_dist(source_root, "1.0.0+test")?; + let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?; // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to // normalize the path. - let (_name, wheel_list_files) = list_wheel(source_root, "1.0.0+test")?; - let source_dist_filename = build_source_dist(source_root, dist, "1.0.0+test")?; + let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?; + let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?; let source_dist_path = dist.join(source_dist_filename.to_string()); let source_dist_contents = sdist_contents(&source_dist_path); @@ -414,7 +417,7 @@ mod tests { source_dist_filename.name.as_dist_info_name(), source_dist_filename.version )); - let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, "1.0.0+test")?; + let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?; let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string())); // Check that direct and indirect wheels are identical. 
@@ -515,14 +518,14 @@ mod tests { ] { copy_dir_all(built_by_uv.join(dir), src.path().join(dir)).unwrap(); } - for dir in [ + for filename in [ "pyproject.toml", "README.md", "uv.lock", "LICENSE-APACHE", "LICENSE-MIT", ] { - fs_err::copy(built_by_uv.join(dir), src.path().join(dir)).unwrap(); + fs_err::copy(built_by_uv.join(filename), src.path().join(filename)).unwrap(); } // Clear executable bit on Unix to build the same archive between Unix and Windows. @@ -539,6 +542,14 @@ mod tests { fs_err::set_permissions(&path, perms).unwrap(); } + // Redact the uv_build version to keep the hash stable across releases + let pyproject_toml = fs_err::read_to_string(src.path().join("pyproject.toml")).unwrap(); + let current_requires = + Regex::new(r#"requires = \["uv_build>=[0-9.]+,<[0-9.]+"\]"#).unwrap(); + let mocked_requires = r#"requires = ["uv_build>=1,<2"]"#; + let pyproject_toml = current_requires.replace(pyproject_toml.as_str(), mocked_requires); + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml.as_bytes()).unwrap(); + // Add some files to be excluded let module_root = src.path().join("src").join("built_by_uv"); fs_err::create_dir_all(module_root.join("__pycache__")).unwrap(); @@ -557,7 +568,7 @@ mod tests { // Check that the source dist is reproducible across platforms. 
assert_snapshot!( format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())), - @"9a7f7181c5e69ac14e411a2500fed153a1e6ea41cd5da6f24f226c4cddacf6b7" + @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e" ); // Check both the files we report and the actual files assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r" From 327c2bcd8a9567f46de4f2d86a5a744968346cf7 Mon Sep 17 00:00:00 2001 From: konsti Date: Fri, 18 Jul 2025 14:03:55 +0200 Subject: [PATCH 084/130] Use SHA256 from GitHub API for Python downloads (#14708) We recently ran over the file limit and had to drop hash file from the releases page in favor of bulk SHA256SUMS files (https://github.com/astral-sh/python-build-standalone/pull/691). Conveniently, GitHub has recently started to add a SHA256 digest to the API. GitHub did not backfill the hashes for the old releases, so use the API hashes for newer assets, and eventually only download SHA256SUMS for older releases. --- crates/uv-python/fetch-download-metadata.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/crates/uv-python/fetch-download-metadata.py b/crates/uv-python/fetch-download-metadata.py index 08adaecea..3dd0817f3 100755 --- a/crates/uv-python/fetch-download-metadata.py +++ b/crates/uv-python/fetch-download-metadata.py @@ -255,8 +255,13 @@ class CPythonFinder(Finder): # Sort the assets to ensure deterministic results row["assets"].sort(key=lambda asset: asset["browser_download_url"]) for asset in row["assets"]: + # On older versions, GitHub didn't backfill the digest. 
+ if digest := asset["digest"]: + sha256 = digest.removeprefix("sha256:") + else: + sha256 = None url = asset["browser_download_url"] - download = self._parse_download_url(url) + download = self._parse_download_url(url, sha256) if download is None: continue if ( @@ -305,6 +310,9 @@ class CPythonFinder(Finder): """Fetch the checksums for the given downloads.""" checksum_urls = set() for download in downloads: + # Skip the newer releases where we got the hash from the GitHub API + if download.sha256: + continue release_base_url = download.url.rsplit("/", maxsplit=1)[0] checksum_url = release_base_url + "/SHA256SUMS" checksum_urls.add(checksum_url) @@ -343,9 +351,13 @@ class CPythonFinder(Finder): checksums[filename] = checksum for download in downloads: + if download.sha256: + continue download.sha256 = checksums.get(download.filename) - def _parse_download_url(self, url: str) -> PythonDownload | None: + def _parse_download_url( + self, url: str, sha256: str | None + ) -> PythonDownload | None: """Parse an indygreg download URL into a PythonDownload object.""" # Ex) # https://github.com/astral-sh/python-build-standalone/releases/download/20240107/cpython-3.12.1%2B20240107-aarch64-unknown-linux-gnu-lto-full.tar.zst @@ -391,6 +403,7 @@ class CPythonFinder(Finder): url=url, build_options=build_options, variant=variant, + sha256=sha256, ) def _normalize_triple(self, triple: str) -> PlatformTriple | None: From 8f2f43c5614e0e0723afdd039a819d05ed880fc3 Mon Sep 17 00:00:00 2001 From: konsti Date: Fri, 18 Jul 2025 14:08:49 +0200 Subject: [PATCH 085/130] Add a reusable path-or-URL parser (#14712) Reviewing #14687, I noticed that we had implemented a `Url::from_url_or_path`-like function, but it wasn't reusable. This change adds `Verbatim::from_url_or_path` so we can use it in other places too. The PEP 508 parser is an odd place for this, but that's where `VerbatimUrl` and `Scheme` are already living. 
--- crates/uv-distribution-types/src/index_url.rs | 29 +----------- crates/uv-pep508/src/verbatim_url.rs | 46 +++++++++++++++++++ 2 files changed, 48 insertions(+), 27 deletions(-) diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs index bd3e9abc2..cbc1a4eb1 100644 --- a/crates/uv-distribution-types/src/index_url.rs +++ b/crates/uv-distribution-types/src/index_url.rs @@ -39,33 +39,8 @@ impl IndexUrl { /// If no root directory is provided, relative paths are resolved against the current working /// directory. pub fn parse(path: &str, root_dir: Option<&Path>) -> Result { - let url = match split_scheme(path) { - Some((scheme, ..)) => { - match Scheme::parse(scheme) { - Some(_) => { - // Ex) `https://pypi.org/simple` - VerbatimUrl::parse_url(path)? - } - None => { - // Ex) `C:\Users\user\index` - if let Some(root_dir) = root_dir { - VerbatimUrl::from_path(path, root_dir)? - } else { - VerbatimUrl::from_absolute_path(std::path::absolute(path)?)? - } - } - } - } - None => { - // Ex) `/Users/user/index` - if let Some(root_dir) = root_dir { - VerbatimUrl::from_path(path, root_dir)? - } else { - VerbatimUrl::from_absolute_path(std::path::absolute(path)?)? - } - } - }; - Ok(Self::from(url.with_given(path))) + let url = VerbatimUrl::from_url_or_path(path, root_dir)?; + Ok(Self::from(url)) } /// Return the root [`Url`] of the index, if applicable. diff --git a/crates/uv-pep508/src/verbatim_url.rs b/crates/uv-pep508/src/verbatim_url.rs index 37d07b40b..2911de938 100644 --- a/crates/uv-pep508/src/verbatim_url.rs +++ b/crates/uv-pep508/src/verbatim_url.rs @@ -58,6 +58,48 @@ impl VerbatimUrl { }) } + /// Convert a [`VerbatimUrl`] from a path or a URL. + /// + /// If no root directory is provided, relative paths are resolved against the current working + /// directory. 
+ pub fn from_url_or_path( + input: &str, + root_dir: Option<&Path>, + ) -> Result { + let url = match split_scheme(input) { + Some((scheme, ..)) => { + match Scheme::parse(scheme) { + Some(_) => { + // Ex) `https://pypi.org/simple` + Self::parse_url(input)? + } + None => { + // Ex) `C:\Users\user\index` + if let Some(root_dir) = root_dir { + Self::from_path(input, root_dir)? + } else { + let absolute_path = std::path::absolute(input).map_err(|err| { + VerbatimUrlError::Absolute(input.to_string(), err) + })?; + Self::from_absolute_path(absolute_path)? + } + } + } + } + None => { + // Ex) `/Users/user/index` + if let Some(root_dir) = root_dir { + Self::from_path(input, root_dir)? + } else { + let absolute_path = std::path::absolute(input) + .map_err(|err| VerbatimUrlError::Absolute(input.to_string(), err))?; + Self::from_absolute_path(absolute_path)? + } + } + }; + Ok(url.with_given(input)) + } + /// Parse a URL from an absolute or relative path. #[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs. pub fn from_path( @@ -362,6 +404,10 @@ pub enum VerbatimUrlError { #[error("path could not be normalized: {0}")] Normalization(PathBuf, #[source] std::io::Error), + /// Received a path that could not be converted to an absolute path. + #[error("path could not be converted to an absolute path: {0}")] + Absolute(String, #[source] std::io::Error), + /// Received a path that could not be normalized. #[cfg(not(feature = "non-pep508-extensions"))] #[error("Not a URL (missing scheme): {0}")] From d1f4f8a358016a60d730ba1ca234ccfdf194ee8e Mon Sep 17 00:00:00 2001 From: konsti Date: Fri, 18 Jul 2025 14:47:56 +0200 Subject: [PATCH 086/130] More resilient registry removal (#14717) With the previous order of operations, there could be warnings from race conditions between two processes A and B removing and installing Python versions. 
* A removes the files for CPython3.9.18 * B sees the key CPython3.9.18 * B sees that CPython3.9.18 has no files * A removes the key for CPython3.9.18 * B tries to remove the key for CPython3.9.18, gets an error that it's already gone, issues a warning We make this more resilient in two ways: * We remove the registry key first, avoiding dangling registry keys in the removal process * We ignore not-found errors in registry removal operations: If we try to remove something that's already gone, that's fine. Fixes #14714 (hopefully) --- crates/uv-python/src/windows_registry.rs | 6 ++++++ crates/uv/src/commands/python/uninstall.rs | 24 ++++++++++++---------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/crates/uv-python/src/windows_registry.rs b/crates/uv-python/src/windows_registry.rs index 7c6f6f307..f722db60c 100644 --- a/crates/uv-python/src/windows_registry.rs +++ b/crates/uv-python/src/windows_registry.rs @@ -268,6 +268,9 @@ pub fn remove_orphan_registry_entries(installations: &[ManagedPythonInstallation // Separate assignment since `keys()` creates a borrow. let subkeys = match key.keys() { Ok(subkeys) => subkeys, + Err(err) if err.code() == ERROR_NOT_FOUND => { + return; + } Err(err) => { // TODO(konsti): We don't have an installation key here. warn_user_once!("Failed to list subkeys of HKCU:\\{astral_key}: {err}"); @@ -281,6 +284,9 @@ pub fn remove_orphan_registry_entries(installations: &[ManagedPythonInstallation let python_entry = format!("{astral_key}\\{subkey}"); debug!("Removing orphan registry key HKCU:\\{}", python_entry); if let Err(err) = CURRENT_USER.remove_tree(&python_entry) { + if err.code() == ERROR_NOT_FOUND { + continue; + } // TODO(konsti): We don't have an installation key here. 
warn_user_once!("Failed to remove orphan registry key HKCU:\\{python_entry}: {err}"); } diff --git a/crates/uv/src/commands/python/uninstall.rs b/crates/uv/src/commands/python/uninstall.rs index dd306fc4d..c2e2e6877 100644 --- a/crates/uv/src/commands/python/uninstall.rs +++ b/crates/uv/src/commands/python/uninstall.rs @@ -142,6 +142,19 @@ async fn do_uninstall( return Ok(ExitStatus::Failure); } + // Remove registry entries first, so we don't have dangling entries between the file removal + // and the registry removal. + let mut errors = vec![]; + #[cfg(windows)] + { + uv_python::windows_registry::remove_registry_entry( + &matching_installations, + all, + &mut errors, + ); + uv_python::windows_registry::remove_orphan_registry_entries(&installed_installations); + } + // Find and remove all relevant Python executables let mut uninstalled_executables: FxHashMap> = FxHashMap::default(); @@ -201,7 +214,6 @@ async fn do_uninstall( } let mut uninstalled = IndexSet::::default(); - let mut errors = vec![]; while let Some((key, result)) = tasks.next().await { if let Err(err) = result { errors.push((key.clone(), anyhow::Error::new(err))); @@ -210,16 +222,6 @@ async fn do_uninstall( } } - #[cfg(windows)] - { - uv_python::windows_registry::remove_registry_entry( - &matching_installations, - all, - &mut errors, - ); - uv_python::windows_registry::remove_orphan_registry_entries(&installed_installations); - } - // Read all existing managed installations and find the highest installed patch // for each installed minor version. Ensure the minor version link directory // is still valid. 
From 70875128be837b8ea96fb0cf6c9c2f9dea859195 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 18 Jul 2025 07:49:25 -0500 Subject: [PATCH 087/130] Disable the Windows Registry updates during `python install` tests (#14718) --- crates/uv/tests/it/common/mod.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index ab4c38247..4c686cb77 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -795,6 +795,9 @@ impl TestContext { .env(EnvVars::UV_PYTHON_DOWNLOADS, "never") .env(EnvVars::UV_TEST_PYTHON_PATH, self.python_path()) .env(EnvVars::UV_EXCLUDE_NEWER, EXCLUDE_NEWER) + // When installations are allowed, we don't want to write to global state, like the + // Windows registry + .env(EnvVars::UV_PYTHON_INSTALL_REGISTRY, "0") // Since downloads, fetches and builds run in parallel, their message output order is // non-deterministic, so can't capture them in test output. .env(EnvVars::UV_TEST_NO_CLI_PROGRESS, "1") From a186fda2d27d74c631c50ffccb9413d31f95cc89 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Fri, 18 Jul 2025 08:07:36 -0500 Subject: [PATCH 088/130] Elide traceback when `python -m uv` is interrupted with Ctrl-C on Windows (#14715) Closes https://github.com/astral-sh/uv/issues/14704 --- python/uv/__main__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/python/uv/__main__.py b/python/uv/__main__.py index d8731c7ec..15c081867 100644 --- a/python/uv/__main__.py +++ b/python/uv/__main__.py @@ -37,7 +37,12 @@ def _run() -> None: if sys.platform == "win32": import subprocess - completed_process = subprocess.run([uv, *sys.argv[1:]], env=env) + # Avoid emitting a traceback on interrupt + try: + completed_process = subprocess.run([uv, *sys.argv[1:]], env=env) + except KeyboardInterrupt: + sys.exit(2) + sys.exit(completed_process.returncode) else: os.execvpe(uv, [uv, *sys.argv[1:]], env=env) From 574aa1ef110ef08293512eb200bd6881bb738179 
Mon Sep 17 00:00:00 2001 From: konsti Date: Fri, 18 Jul 2025 15:26:47 +0200 Subject: [PATCH 089/130] Better error reporting for removing Python versions from the Windows registry (#14722) See https://github.com/astral-sh/uv/actions/runs/16370666070/job/46258004849 We didn't actually use a format string, showing the template instead. We don't show the causes in the error report, so we format it into one error. --- crates/uv-python/src/windows_registry.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/uv-python/src/windows_registry.rs b/crates/uv-python/src/windows_registry.rs index f722db60c..0020f95e9 100644 --- a/crates/uv-python/src/windows_registry.rs +++ b/crates/uv-python/src/windows_registry.rs @@ -3,6 +3,7 @@ use crate::managed::ManagedPythonInstallation; use crate::platform::Arch; use crate::{COMPANY_DISPLAY_NAME, COMPANY_KEY, PythonInstallationKey, PythonVersion}; +use anyhow::anyhow; use std::cmp::Ordering; use std::collections::HashSet; use std::path::PathBuf; @@ -238,8 +239,7 @@ pub fn remove_registry_entry<'a>( } else { errors.push(( installation.key().clone(), - anyhow::Error::new(err) - .context("Failed to clear registry entries under HKCU:\\{python_entry}"), + anyhow!("Failed to clear registry entries under HKCU:\\{python_entry}: {err}"), )); } } From d0efe1ed9c4bec806e8449a471bd97c9be10ba14 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Fri, 18 Jul 2025 16:32:29 -0400 Subject: [PATCH 090/130] Apply Cache-Control overrides to response, not request headers (#14736) ## Summary This was just an oversight on my part in the initial implementation. Closes https://github.com/astral-sh/uv/issues/14719. 
## Test Plan With: ```toml [project] name = "foo" version = "0.1.0" description = "Add your description here" readme = "README.md" requires-python = ">=3.13.2" dependencies = [ ] [[tool.uv.index]] url = "https://download.pytorch.org/whl/cpu" cache-control = { api = "max-age=600" } ``` Ran `cargo run lock -vvv` and verified that the PyTorch index response was cached (whereas it typically returns `cache-control: no-cache,no-store,must-revalidate`). --- crates/uv-client/src/cached_client.rs | 83 ++++++++++++++----- crates/uv-distribution-types/src/index_url.rs | 20 +++++ .../src/distribution_database.rs | 68 +++++++++++---- crates/uv-distribution/src/source/mod.rs | 80 +++++++++++++++--- 4 files changed, 201 insertions(+), 50 deletions(-) diff --git a/crates/uv-client/src/cached_client.rs b/crates/uv-client/src/cached_client.rs index f888ea5f1..4219decd5 100644 --- a/crates/uv-client/src/cached_client.rs +++ b/crates/uv-client/src/cached_client.rs @@ -304,7 +304,7 @@ impl CachedClient { .await? 
} else { debug!("No cache entry for: {}", req.url()); - let (response, cache_policy) = self.fresh_request(req).await?; + let (response, cache_policy) = self.fresh_request(req, cache_control).await?; CachedResponse::ModifiedOrNew { response, cache_policy, @@ -318,8 +318,13 @@ impl CachedClient { "Broken fresh cache entry (for payload) at {}, removing: {err}", cache_entry.path().display() ); - self.resend_and_heal_cache(fresh_req, cache_entry, response_callback) - .await + self.resend_and_heal_cache( + fresh_req, + cache_entry, + cache_control, + response_callback, + ) + .await } }, CachedResponse::NotModified { cached, new_policy } => { @@ -339,8 +344,13 @@ impl CachedClient { (for payload) at {}, removing: {err}", cache_entry.path().display() ); - self.resend_and_heal_cache(fresh_req, cache_entry, response_callback) - .await + self.resend_and_heal_cache( + fresh_req, + cache_entry, + cache_control, + response_callback, + ) + .await } } } @@ -355,8 +365,13 @@ impl CachedClient { // ETag didn't match). We need to make a fresh request. 
if response.status() == http::StatusCode::NOT_MODIFIED { warn!("Server returned unusable 304 for: {}", fresh_req.url()); - self.resend_and_heal_cache(fresh_req, cache_entry, response_callback) - .await + self.resend_and_heal_cache( + fresh_req, + cache_entry, + cache_control, + response_callback, + ) + .await } else { self.run_response_callback( cache_entry, @@ -379,9 +394,10 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { - let (response, cache_policy) = self.fresh_request(req).await?; + let (response, cache_policy) = self.fresh_request(req, cache_control).await?; let payload = self .run_response_callback(cache_entry, cache_policy, response, async |resp| { @@ -401,10 +417,11 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let _ = fs_err::tokio::remove_file(&cache_entry.path()).await; - let (response, cache_policy) = self.fresh_request(req).await?; + let (response, cache_policy) = self.fresh_request(req, cache_control).await?; self.run_response_callback(cache_entry, cache_policy, response, response_callback) .await } @@ -476,20 +493,13 @@ impl CachedClient { ) -> Result { // Apply the cache control header, if necessary. match cache_control { - CacheControl::None | CacheControl::AllowStale => {} + CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) 
=> {} CacheControl::MustRevalidate => { req.headers_mut().insert( http::header::CACHE_CONTROL, http::HeaderValue::from_static("no-cache"), ); } - CacheControl::Override(value) => { - req.headers_mut().insert( - http::header::CACHE_CONTROL, - http::HeaderValue::from_str(value) - .map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?, - ); - } } Ok(match cached.cache_policy.before_request(&mut req) { BeforeRequest::Fresh => { @@ -499,8 +509,13 @@ impl CachedClient { BeforeRequest::Stale(new_cache_policy_builder) => match cache_control { CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => { debug!("Found stale response for: {}", req.url()); - self.send_cached_handle_stale(req, cached, new_cache_policy_builder) - .await? + self.send_cached_handle_stale( + req, + cache_control, + cached, + new_cache_policy_builder, + ) + .await? } CacheControl::AllowStale => { debug!("Found stale (but allowed) response for: {}", req.url()); @@ -513,7 +528,7 @@ impl CachedClient { "Cached request doesn't match current request for: {}", req.url() ); - let (response, cache_policy) = self.fresh_request(req).await?; + let (response, cache_policy) = self.fresh_request(req, cache_control).await?; CachedResponse::ModifiedOrNew { response, cache_policy, @@ -525,12 +540,13 @@ impl CachedClient { async fn send_cached_handle_stale( &self, req: Request, + cache_control: CacheControl<'_>, cached: DataWithCachePolicy, new_cache_policy_builder: CachePolicyBuilder, ) -> Result { let url = DisplaySafeUrl::from(req.url().clone()); debug!("Sending revalidation request for: {url}"); - let response = self + let mut response = self .0 .execute(req) .instrument(info_span!("revalidation_request", url = url.as_str())) @@ -538,6 +554,16 @@ impl CachedClient { .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))? .error_for_status() .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?; + + // If the user set a custom `Cache-Control` header, override it. 
+ if let CacheControl::Override(header) = cache_control { + response.headers_mut().insert( + http::header::CACHE_CONTROL, + http::HeaderValue::from_str(header) + .expect("Cache-Control header must be valid UTF-8"), + ); + } + match cached .cache_policy .after_response(new_cache_policy_builder, &response) @@ -566,16 +592,26 @@ impl CachedClient { async fn fresh_request( &self, req: Request, + cache_control: CacheControl<'_>, ) -> Result<(Response, Option>), Error> { let url = DisplaySafeUrl::from(req.url().clone()); trace!("Sending fresh {} request for {}", req.method(), url); let cache_policy_builder = CachePolicyBuilder::new(&req); - let response = self + let mut response = self .0 .execute(req) .await .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?; + // If the user set a custom `Cache-Control` header, override it. + if let CacheControl::Override(header) = cache_control { + response.headers_mut().insert( + http::header::CACHE_CONTROL, + http::HeaderValue::from_str(header) + .expect("Cache-Control header must be valid UTF-8"), + ); + } + let retry_count = response .extensions() .get::() @@ -690,6 +726,7 @@ impl CachedClient { &self, req: Request, cache_entry: &CacheEntry, + cache_control: CacheControl<'_>, response_callback: Callback, ) -> Result> { let mut past_retries = 0; @@ -698,7 +735,7 @@ impl CachedClient { loop { let fresh_req = req.try_clone().expect("HTTP request must be cloneable"); let result = self - .skip_cache(fresh_req, cache_entry, &response_callback) + .skip_cache(fresh_req, cache_entry, cache_control, &response_callback) .await; // Check if the middleware already performed retries diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs index cbc1a4eb1..6baca1c1f 100644 --- a/crates/uv-distribution-types/src/index_url.rs +++ b/crates/uv-distribution-types/src/index_url.rs @@ -441,6 +441,26 @@ impl<'a> IndexLocations { } } } + + /// Return the Simple API cache control header 
for an [`IndexUrl`], if configured. + pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> { + for index in &self.indexes { + if index.url() == url { + return index.cache_control.as_ref()?.api.as_deref(); + } + } + None + } + + /// Return the artifact cache control header for an [`IndexUrl`], if configured. + pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> { + for index in &self.indexes { + if index.url() == url { + return index.cache_control.as_ref()?.files.as_deref(); + } + } + None + } } impl From<&IndexLocations> for uv_auth::Indexes { diff --git a/crates/uv-distribution/src/distribution_database.rs b/crates/uv-distribution/src/distribution_database.rs index d18269730..30f3a243c 100644 --- a/crates/uv-distribution/src/distribution_database.rs +++ b/crates/uv-distribution/src/distribution_database.rs @@ -20,7 +20,7 @@ use uv_client::{ }; use uv_distribution_filename::WheelFilename; use uv_distribution_types::{ - BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, InstalledDist, Name, SourceDist, + BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, SourceDist, }; use uv_extract::hash::Hasher; use uv_fs::write_atomic; @@ -201,6 +201,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { match self .stream_wheel( url.clone(), + dist.index(), &wheel.filename, wheel.file.size, &wheel_entry, @@ -236,6 +237,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let archive = self .download_wheel( url, + dist.index(), &wheel.filename, wheel.file.size, &wheel_entry, @@ -272,6 +274,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { match self .stream_wheel( wheel.url.raw().clone(), + None, &wheel.filename, None, &wheel_entry, @@ -301,6 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let archive = self .download_wheel( wheel.url.raw().clone(), + None, &wheel.filename, None, &wheel_entry, @@ -534,6 +538,7 
@@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { async fn stream_wheel( &self, url: DisplaySafeUrl, + index: Option<&IndexUrl>, filename: &WheelFilename, size: Option, wheel_entry: &CacheEntry, @@ -616,13 +621,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { // Fetch the archive from the cache, or download it if necessary. let req = self.request(url.clone())?; + // Determine the cache control policy for the URL. let cache_control = match self.client.unmanaged.connectivity() { - Connectivity::Online => CacheControl::from( - self.build_context - .cache() - .freshness(&http_entry, Some(&filename.name), None) - .map_err(Error::CacheRead)?, - ), + Connectivity::Online => { + if let Some(header) = index.and_then(|index| { + self.build_context + .locations() + .artifact_cache_control_for(index) + }) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.build_context + .cache() + .freshness(&http_entry, Some(&filename.name), None) + .map_err(Error::CacheRead)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; @@ -654,7 +670,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { .managed(async |client| { client .cached_client() - .skip_cache_with_retry(self.request(url)?, &http_entry, download) + .skip_cache_with_retry( + self.request(url)?, + &http_entry, + cache_control, + download, + ) .await .map_err(|err| match err { CachedClientError::Callback { err, .. } => err, @@ -671,6 +692,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { async fn download_wheel( &self, url: DisplaySafeUrl, + index: Option<&IndexUrl>, filename: &WheelFilename, size: Option, wheel_entry: &CacheEntry, @@ -783,13 +805,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { // Fetch the archive from the cache, or download it if necessary. let req = self.request(url.clone())?; + // Determine the cache control policy for the URL. 
let cache_control = match self.client.unmanaged.connectivity() { - Connectivity::Online => CacheControl::from( - self.build_context - .cache() - .freshness(&http_entry, Some(&filename.name), None) - .map_err(Error::CacheRead)?, - ), + Connectivity::Online => { + if let Some(header) = index.and_then(|index| { + self.build_context + .locations() + .artifact_cache_control_for(index) + }) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.build_context + .cache() + .freshness(&http_entry, Some(&filename.name), None) + .map_err(Error::CacheRead)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; @@ -821,7 +854,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { .managed(async |client| { client .cached_client() - .skip_cache_with_retry(self.request(url)?, &http_entry, download) + .skip_cache_with_retry( + self.request(url)?, + &http_entry, + cache_control, + download, + ) .await .map_err(|err| match err { CachedClientError::Callback { err, .. 
} => err, diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index 080a1e52d..66b6122e0 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -32,7 +32,7 @@ use uv_client::{ use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy}; use uv_distribution_filename::{SourceDistExtension, WheelFilename}; use uv_distribution_types::{ - BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl, + BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, IndexUrl, PathSourceUrl, SourceDist, SourceUrl, }; use uv_extract::hash::Hasher; @@ -148,6 +148,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { self.url( source, &url, + Some(&dist.index), &cache_shard, None, dist.ext, @@ -168,6 +169,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { self.url( source, &dist.url, + None, &cache_shard, dist.subdirectory.as_deref(), dist.ext, @@ -213,6 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { self.url( source, resource.url, + None, &cache_shard, resource.subdirectory, resource.ext, @@ -288,9 +291,18 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .await; } - self.url_metadata(source, &url, &cache_shard, None, dist.ext, hashes, client) - .boxed_local() - .await? + self.url_metadata( + source, + &url, + Some(&dist.index), + &cache_shard, + None, + dist.ext, + hashes, + client, + ) + .boxed_local() + .await? } BuildableSource::Dist(SourceDist::DirectUrl(dist)) => { // For direct URLs, cache directly under the hash of the URL itself. 
@@ -302,6 +314,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { self.url_metadata( source, &dist.url, + None, &cache_shard, dist.subdirectory.as_deref(), dist.ext, @@ -340,6 +353,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { self.url_metadata( source, resource.url, + None, &cache_shard, resource.subdirectory, resource.ext, @@ -395,6 +409,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { &self, source: &BuildableSource<'data>, url: &'data DisplaySafeUrl, + index: Option<&'data IndexUrl>, cache_shard: &CacheShard, subdirectory: Option<&'data Path>, ext: SourceDistExtension, @@ -406,7 +421,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // Fetch the revision for the source distribution. let revision = self - .url_revision(source, ext, url, cache_shard, hashes, client) + .url_revision(source, ext, url, index, cache_shard, hashes, client) .await?; // Before running the build, check that the hashes match. @@ -448,6 +463,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { source, ext, url, + index, &source_dist_entry, revision, hashes, @@ -511,6 +527,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { &self, source: &BuildableSource<'data>, url: &'data Url, + index: Option<&'data IndexUrl>, cache_shard: &CacheShard, subdirectory: Option<&'data Path>, ext: SourceDistExtension, @@ -521,7 +538,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // Fetch the revision for the source distribution. let revision = self - .url_revision(source, ext, url, cache_shard, hashes, client) + .url_revision(source, ext, url, index, cache_shard, hashes, client) .await?; // Before running the build, check that the hashes match. 
@@ -578,6 +595,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { source, ext, url, + index, &source_dist_entry, revision, hashes, @@ -689,18 +707,31 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { source: &BuildableSource<'_>, ext: SourceDistExtension, url: &Url, + index: Option<&IndexUrl>, cache_shard: &CacheShard, hashes: HashPolicy<'_>, client: &ManagedClient<'_>, ) -> Result { let cache_entry = cache_shard.entry(HTTP_REVISION); + + // Determine the cache control policy for the request. let cache_control = match client.unmanaged.connectivity() { - Connectivity::Online => CacheControl::from( - self.build_context - .cache() - .freshness(&cache_entry, source.name(), source.source_tree()) - .map_err(Error::CacheRead)?, - ), + Connectivity::Online => { + if let Some(header) = index.and_then(|index| { + self.build_context + .locations() + .artifact_cache_control_for(index) + }) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.build_context + .cache() + .freshness(&cache_entry, source.name(), source.source_tree()) + .map_err(Error::CacheRead)?, + ) + } + } Connectivity::Offline => CacheControl::AllowStale, }; @@ -750,6 +781,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .skip_cache_with_retry( Self::request(DisplaySafeUrl::from(url.clone()), client)?, &cache_entry, + cache_control, download, ) .await @@ -2056,6 +2088,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { source: &BuildableSource<'_>, ext: SourceDistExtension, url: &Url, + index: Option<&IndexUrl>, entry: &CacheEntry, revision: Revision, hashes: HashPolicy<'_>, @@ -2063,6 +2096,28 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { ) -> Result { warn!("Re-downloading missing source distribution: {source}"); let cache_entry = entry.shard().entry(HTTP_REVISION); + + // Determine the cache control policy for the request. 
+ let cache_control = match client.unmanaged.connectivity() { + Connectivity::Online => { + if let Some(header) = index.and_then(|index| { + self.build_context + .locations() + .artifact_cache_control_for(index) + }) { + CacheControl::Override(header) + } else { + CacheControl::from( + self.build_context + .cache() + .freshness(&cache_entry, source.name(), source.source_tree()) + .map_err(Error::CacheRead)?, + ) + } + } + Connectivity::Offline => CacheControl::AllowStale, + }; + let download = |response| { async { // Take the union of the requested and existing hash algorithms. @@ -2096,6 +2151,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .skip_cache_with_retry( Self::request(DisplaySafeUrl::from(url.clone()), client)?, &cache_entry, + cache_control, download, ) .await From 9c9db9b5476388bcfb32eb5f76e09462ea1c3d86 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 09:44:25 -0400 Subject: [PATCH 091/130] Clarify which portions of requires-python behavior are consistent with pip (#14752) See: #14711 --- docs/pip/compatibility.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/pip/compatibility.md b/docs/pip/compatibility.md index 5719c2fcc..2ce702006 100644 --- a/docs/pip/compatibility.md +++ b/docs/pip/compatibility.md @@ -447,7 +447,7 @@ By default, uv does not write any index URLs to the output file, while `pip-comp in the output file, pass the `--emit-index-url` flag to `uv pip compile`. Unlike `pip-compile`, uv will include all index URLs when `--emit-index-url` is passed, including the default index URL. -## `requires-python` enforcement +## `requires-python` upper bounds When evaluating `requires-python` ranges for dependencies, uv only considers lower bounds and ignores upper bounds entirely. For example, `>=3.8, <4` is treated as `>=3.8`. 
Respecting upper @@ -455,6 +455,8 @@ bounds on `requires-python` often leads to formally correct but practically inco as, e.g., resolvers will backtrack to the first published version that omits the upper bound (see: [`Requires-Python` upper limits](https://discuss.python.org/t/requires-python-upper-limits/12663)). +## `requires-python` specifiers + When evaluating Python versions against `requires-python` specifiers, uv truncates the candidate version to the major, minor, and patch components, ignoring (e.g.) pre-release and post-release identifiers. From d0a14c72a303deaf6f8ea5115df2129948b20cf9 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Sun, 20 Jul 2025 11:12:01 -0500 Subject: [PATCH 092/130] Fix tests requiring patch-level Python (#14733) Closes #14723 https://chatgpt.com/codex/tasks/task_e_687a532188d08331b4352ba0a78f8fdb --- crates/uv/tests/it/sync.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 16c4d673a..1639ecaae 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -10494,7 +10494,7 @@ fn sync_required_environment_hint() -> Result<()> { [project] name = "example" version = "0.1.0" - requires-python = ">=3.13.2" + requires-python = ">=3.13" dependencies = ["no-sdist-no-wheels-with-matching-platform-a"] [[tool.uv.index]] @@ -10544,7 +10544,7 @@ fn sync_url_with_query_parameters() -> Result<()> { [project] name = "example" version = "0.1.0" - requires-python = ">=3.13.2" + requires-python = ">=3.13" dependencies = ["source-distribution @ https://files.pythonhosted.org/packages/1f/e5/5b016c945d745f8b108e759d428341488a6aee8f51f07c6c4e33498bb91f/source_distribution-0.0.3.tar.gz?foo=bar"] "# )?; From 2d8dda34b4c5e1cc7ec479dee830627dd317ddf3 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 13:53:36 -0400 Subject: [PATCH 093/130] Fix comment on `extra_names` (#14756) --- crates/uv-configuration/src/extras.rs | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/crates/uv-configuration/src/extras.rs b/crates/uv-configuration/src/extras.rs index e39fc72ef..5bb74240f 100644 --- a/crates/uv-configuration/src/extras.rs +++ b/crates/uv-configuration/src/extras.rs @@ -155,7 +155,8 @@ impl ExtrasSpecificationInner { self.include.names().chain(&self.exclude) } - /// Returns `true` if the specification includes the given extra. + /// Returns an iterator over all extras that are included in the specification, + /// assuming `all_names` is an iterator over all extras. pub fn extra_names<'a, Names>( &'a self, all_names: Names, From a3371867acc888771316c0c5a7e49fc2f46a1df7 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 14:02:22 -0400 Subject: [PATCH 094/130] Support `extras` and `dependency_groups` markers in PEP 508 grammar (#14753) ## Summary We always evaluate these to `false` right now, but we can at least parse them. See: https://peps.python.org/pep-0751/#dependency-groups. --- crates/uv-pep508/src/marker/algebra.rs | 60 ++++- crates/uv-pep508/src/marker/lowering.rs | 37 ++- crates/uv-pep508/src/marker/parse.rs | 102 +++++++- crates/uv-pep508/src/marker/simplify.rs | 59 +++++ crates/uv-pep508/src/marker/tree.rs | 267 +++++++++++++++++++- crates/uv-resolver/src/marker.rs | 10 + crates/uv-resolver/src/resolution/output.rs | 10 + 7 files changed, 530 insertions(+), 15 deletions(-) diff --git a/crates/uv-pep508/src/marker/algebra.rs b/crates/uv-pep508/src/marker/algebra.rs index 2a3f82f27..d1a369491 100644 --- a/crates/uv-pep508/src/marker/algebra.rs +++ b/crates/uv-pep508/src/marker/algebra.rs @@ -59,8 +59,10 @@ use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range} use crate::marker::MarkerValueExtra; use crate::marker::lowering::{ - CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion, + CanonicalMarkerValueDependencyGroup, CanonicalMarkerValueExtra, CanonicalMarkerValueString, + CanonicalMarkerValueVersion, }; 
+use crate::marker::tree::{ContainerOperator, MarkerValueDependencyGroup}; use crate::{ ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion, }; @@ -328,11 +330,53 @@ impl InternerGuard<'_> { Variable::Extra(CanonicalMarkerValueExtra::Extra(extra)), Edges::from_bool(false), ), - // Invalid extras are always `false`. + // Invalid `extra` names are always `false`. MarkerExpression::Extra { name: MarkerValueExtra::Arbitrary(_), .. } => return NodeId::FALSE, + // A variable representing the existence or absence of a particular extra, in the + // context of a PEP 751 lockfile. + MarkerExpression::Extras { + name: MarkerValueExtra::Extra(extra), + operator: ContainerOperator::In, + } => ( + Variable::Extras(CanonicalMarkerValueExtra::Extra(extra)), + Edges::from_bool(true), + ), + MarkerExpression::Extras { + name: MarkerValueExtra::Extra(extra), + operator: ContainerOperator::NotIn, + } => ( + Variable::Extras(CanonicalMarkerValueExtra::Extra(extra)), + Edges::from_bool(false), + ), + // Invalid `extras` names are always `false`. + MarkerExpression::Extras { + name: MarkerValueExtra::Arbitrary(_), + .. + } => return NodeId::FALSE, + // A variable representing the existence or absence of a particular extra, in the + // context of a PEP 751 lockfile. + MarkerExpression::DependencyGroups { + name: MarkerValueDependencyGroup::Group(group), + operator: ContainerOperator::In, + } => ( + Variable::DependencyGroups(CanonicalMarkerValueDependencyGroup::Group(group)), + Edges::from_bool(true), + ), + MarkerExpression::DependencyGroups { + name: MarkerValueDependencyGroup::Group(group), + operator: ContainerOperator::NotIn, + } => ( + Variable::DependencyGroups(CanonicalMarkerValueDependencyGroup::Group(group)), + Edges::from_bool(false), + ), + // Invalid `dependency_group` names are always `false`. + MarkerExpression::DependencyGroups { + name: MarkerValueDependencyGroup::Arbitrary(_), + .. 
+ } => return NodeId::FALSE, }; self.create_node(var, children) @@ -1046,6 +1090,18 @@ pub(crate) enum Variable { /// We keep extras at the leaves of the tree, so when simplifying extras we can /// trivially remove the leaves without having to reconstruct the entire tree. Extra(CanonicalMarkerValueExtra), + /// A variable representing the existence or absence of a given extra, in the context of a + /// PEP 751 lockfile marker. + /// + /// We keep extras at the leaves of the tree, so when simplifying extras we can + /// trivially remove the leaves without having to reconstruct the entire tree. + Extras(CanonicalMarkerValueExtra), + /// A variable representing the existence or absence of a given dependency group, in the context of a + /// PEP 751 lockfile marker. + /// + /// We keep groups at the leaves of the tree, so when simplifying groups we can + /// trivially remove the leaves without having to reconstruct the entire tree. + DependencyGroups(CanonicalMarkerValueDependencyGroup), } impl Variable { diff --git a/crates/uv-pep508/src/marker/lowering.rs b/crates/uv-pep508/src/marker/lowering.rs index 16139a65d..dadfeac53 100644 --- a/crates/uv-pep508/src/marker/lowering.rs +++ b/crates/uv-pep508/src/marker/lowering.rs @@ -1,7 +1,8 @@ use std::fmt::{Display, Formatter}; -use uv_normalize::ExtraName; +use uv_normalize::{ExtraName, GroupName}; +use crate::marker::tree::MarkerValueDependencyGroup; use crate::{MarkerValueExtra, MarkerValueString, MarkerValueVersion}; /// Those environment markers with a PEP 440 version as value such as `python_version` @@ -128,7 +129,7 @@ impl Display for CanonicalMarkerValueString { } } -/// The [`ExtraName`] value used in `extra` markers. +/// The [`ExtraName`] value used in `extra` and `extras` markers. #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] pub enum CanonicalMarkerValueExtra { /// A valid [`ExtraName`]. 
@@ -159,3 +160,35 @@ impl Display for CanonicalMarkerValueExtra { } } } + +/// The [`GroupName`] value used in `dependency_group` markers. +#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub enum CanonicalMarkerValueDependencyGroup { + /// A valid [`GroupName`]. + Group(GroupName), +} + +impl CanonicalMarkerValueDependencyGroup { + /// Returns the [`GroupName`] value. + pub fn group(&self) -> &GroupName { + match self { + Self::Group(group) => group, + } + } +} + +impl From for MarkerValueDependencyGroup { + fn from(value: CanonicalMarkerValueDependencyGroup) -> Self { + match value { + CanonicalMarkerValueDependencyGroup::Group(group) => Self::Group(group), + } + } +} + +impl Display for CanonicalMarkerValueDependencyGroup { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::Group(group) => group.fmt(f), + } + } +} diff --git a/crates/uv-pep508/src/marker/parse.rs b/crates/uv-pep508/src/marker/parse.rs index 13620662b..9c361c19d 100644 --- a/crates/uv-pep508/src/marker/parse.rs +++ b/crates/uv-pep508/src/marker/parse.rs @@ -1,10 +1,11 @@ use arcstr::ArcStr; use std::str::FromStr; -use uv_normalize::ExtraName; +use uv_normalize::{ExtraName, GroupName}; use uv_pep440::{Version, VersionPattern, VersionSpecifier}; use crate::cursor::Cursor; use crate::marker::MarkerValueExtra; +use crate::marker::tree::{ContainerOperator, MarkerValueDependencyGroup}; use crate::{ ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValue, MarkerValueString, MarkerValueVersion, MarkerWarningKind, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, @@ -208,6 +209,8 @@ pub(crate) fn parse_marker_key_op_value( MarkerValue::MarkerEnvString(key) => { let value = match r_value { MarkerValue::Extra + | MarkerValue::Extras + | MarkerValue::DependencyGroups | MarkerValue::MarkerEnvVersion(_) | MarkerValue::MarkerEnvString(_) => { reporter.report( @@ -242,7 +245,9 @@ pub(crate) fn parse_marker_key_op_value( let value = match r_value 
{ MarkerValue::MarkerEnvVersion(_) | MarkerValue::MarkerEnvString(_) - | MarkerValue::Extra => { + | MarkerValue::Extra + | MarkerValue::Extras + | MarkerValue::DependencyGroups => { reporter.report( MarkerWarningKind::ExtraInvalidComparison, "Comparing extra with something other than a quoted string is wrong, @@ -257,7 +262,7 @@ pub(crate) fn parse_marker_key_op_value( parse_extra_expr(operator, &value, reporter) } - // This is either MarkerEnvVersion, MarkerEnvString or Extra inverted + // This is either MarkerEnvVersion, MarkerEnvString, Extra (inverted), or Extras MarkerValue::QuotedString(l_string) => { match r_value { // The only sound choice for this is ` ` @@ -273,6 +278,12 @@ pub(crate) fn parse_marker_key_op_value( }), // `'...' == extra` MarkerValue::Extra => parse_extra_expr(operator, &l_string, reporter), + // `'...' in extras` + MarkerValue::Extras => parse_extras_expr(operator, &l_string, reporter), + // `'...' in dependency_groups` + MarkerValue::DependencyGroups => { + parse_dependency_groups_expr(operator, &l_string, reporter) + } // `'...' == '...'`, doesn't make much sense MarkerValue::QuotedString(_) => { // Not even pypa/packaging 22.0 supports this @@ -289,6 +300,26 @@ pub(crate) fn parse_marker_key_op_value( } } } + MarkerValue::Extras => { + reporter.report( + MarkerWarningKind::Pep440Error, + format!( + "The `extras` marker must be used as '...' in extras' or '... not in extras', + found `{l_value} {operator} {r_value}`, will be ignored" + ), + ); + return Ok(None); + } + MarkerValue::DependencyGroups => { + reporter.report( + MarkerWarningKind::Pep440Error, + format!( + "The `dependency_groups` marker must be used as '...' in dependency_groups' or '... 
not in dependency_groups', + found `{l_value} {operator} {r_value}`, will be ignored" + ), + ); + return Ok(None); + } }; Ok(expr) @@ -491,8 +522,69 @@ fn parse_extra_expr( reporter.report( MarkerWarningKind::ExtraInvalidComparison, - "Comparing extra with something other than a quoted string is wrong, - will be ignored" + "Comparing `extra` with any operator other than `==` or `!=` is wrong and will be ignored" + .to_string(), + ); + + None +} + +/// Creates an instance of [`MarkerExpression::Extras`] with the given values, falling back to +/// [`MarkerExpression::Arbitrary`] on failure. +fn parse_extras_expr( + operator: MarkerOperator, + value: &str, + reporter: &mut impl Reporter, +) -> Option { + let name = match ExtraName::from_str(value) { + Ok(name) => MarkerValueExtra::Extra(name), + Err(err) => { + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + format!("Expected extra name (found `{value}`): {err}"), + ); + MarkerValueExtra::Arbitrary(value.to_string()) + } + }; + + if let Some(operator) = ContainerOperator::from_marker_operator(operator) { + return Some(MarkerExpression::Extras { operator, name }); + } + + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + "Comparing `extras` with any operator other than `in` or `not in` is wrong and will be ignored" + .to_string(), + ); + + None +} + +/// Creates an instance of [`MarkerExpression::DependencyGroups`] with the given values, falling +/// back to [`MarkerExpression::Arbitrary`] on failure. 
+fn parse_dependency_groups_expr( + operator: MarkerOperator, + value: &str, + reporter: &mut impl Reporter, +) -> Option { + let name = match GroupName::from_str(value) { + Ok(name) => MarkerValueDependencyGroup::Group(name), + Err(err) => { + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + format!("Expected extra name (found `{value}`): {err}"), + ); + MarkerValueDependencyGroup::Arbitrary(value.to_string()) + } + }; + + if let Some(operator) = ContainerOperator::from_marker_operator(operator) { + return Some(MarkerExpression::DependencyGroups { operator, name }); + } + + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + "Comparing `extras` with any operator other than `in` or `not in` is wrong and will be ignored" .to_string(), ); diff --git a/crates/uv-pep508/src/marker/simplify.rs b/crates/uv-pep508/src/marker/simplify.rs index 3dc03693a..6897615c4 100644 --- a/crates/uv-pep508/src/marker/simplify.rs +++ b/crates/uv-pep508/src/marker/simplify.rs @@ -9,6 +9,7 @@ use version_ranges::Ranges; use uv_pep440::{Version, VersionSpecifier}; +use crate::marker::tree::ContainerOperator; use crate::{ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeKind}; /// Returns a simplified DNF expression for a given marker tree. 
@@ -174,6 +175,42 @@ fn collect_dnf( operator, }; + path.push(expr); + collect_dnf(tree, dnf, path); + path.pop(); + } + } + MarkerTreeKind::Extras(marker) => { + for (value, tree) in marker.children() { + let operator = if value { + ContainerOperator::In + } else { + ContainerOperator::NotIn + }; + + let expr = MarkerExpression::Extras { + name: marker.name().clone().into(), + operator, + }; + + path.push(expr); + collect_dnf(tree, dnf, path); + path.pop(); + } + } + MarkerTreeKind::DependencyGroups(marker) => { + for (value, tree) in marker.children() { + let operator = if value { + ContainerOperator::In + } else { + ContainerOperator::NotIn + }; + + let expr = MarkerExpression::DependencyGroups { + name: marker.name().clone().into(), + operator, + }; + path.push(expr); collect_dnf(tree, dnf, path); path.pop(); @@ -440,5 +477,27 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool { name == name2 && operator.negate() == *operator2 } + MarkerExpression::Extras { name, operator } => { + let MarkerExpression::Extras { + name: name2, + operator: operator2, + } = right + else { + return false; + }; + + name == name2 && *operator == operator2.negate() + } + MarkerExpression::DependencyGroups { name, operator } => { + let MarkerExpression::DependencyGroups { + name: name2, + operator: operator2, + } = right + else { + return false; + }; + + name == name2 && *operator == operator2.negate() + } } } diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 5739d7c98..594b81723 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -9,14 +9,15 @@ use itertools::Itertools; use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use version_ranges::Ranges; -use uv_normalize::ExtraName; +use uv_normalize::{ExtraName, GroupName}; use uv_pep440::{Version, VersionParseError, VersionSpecifier}; use super::algebra::{Edges, INTERNER, NodeId, Variable}; use super::simplify; 
use crate::cursor::Cursor; use crate::marker::lowering::{ - CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion, + CanonicalMarkerValueDependencyGroup, CanonicalMarkerValueExtra, CanonicalMarkerValueString, + CanonicalMarkerValueVersion, }; use crate::marker::parse; use crate::{ @@ -32,6 +33,12 @@ pub enum MarkerWarningKind { /// Doing an operation other than `==` and `!=` on a quoted string with `extra`, such as /// `extra > "perf"` or `extra == os_name` ExtraInvalidComparison, + /// Doing an operation other than `in` and `not in` on a quoted string with `extra`, such as + /// `extras > "perf"` or `extras == os_name` + ExtrasInvalidComparison, + /// Doing an operation other than `in` and `not in` on a quoted string with `dependency_groups`, + /// such as `dependency_groups > "perf"` or `dependency_groups == os_name` + DependencyGroupsInvalidComparison, /// Comparing a string valued marker and a string lexicographically, such as `"3.9" > "3.10"` LexicographicComparison, /// Comparing two markers, such as `os_name != sys_implementation` @@ -128,8 +135,12 @@ pub enum MarkerValue { MarkerEnvVersion(MarkerValueVersion), /// Those environment markers with an arbitrary string as value such as `sys_platform` MarkerEnvString(MarkerValueString), - /// `extra`. This one is special because it's a list and not env but user given + /// `extra`. This one is special because it's a list, and user-provided Extra, + /// `extras`. This one is special because it's a list, and user-provided + Extras, + /// `dependency_groups`. 
This one is special because it's a list, and user-provided + DependencyGroups, /// Not a constant, but a user given quoted string with a value inside such as '3.8' or "windows" QuotedString(ArcStr), } @@ -170,6 +181,8 @@ impl FromStr for MarkerValue { "sys_platform" => Self::MarkerEnvString(MarkerValueString::SysPlatform), "sys.platform" => Self::MarkerEnvString(MarkerValueString::SysPlatformDeprecated), "extra" => Self::Extra, + "extras" => Self::Extras, + "dependency_groups" => Self::DependencyGroups, _ => return Err(format!("Invalid key: {s}")), }; Ok(value) @@ -182,6 +195,8 @@ impl Display for MarkerValue { Self::MarkerEnvVersion(marker_value_version) => marker_value_version.fmt(f), Self::MarkerEnvString(marker_value_string) => marker_value_string.fmt(f), Self::Extra => f.write_str("extra"), + Self::Extras => f.write_str("extras"), + Self::DependencyGroups => f.write_str("dependency_groups"), Self::QuotedString(value) => write!(f, "'{value}'"), } } @@ -433,7 +448,7 @@ impl Deref for StringVersion { } } -/// The [`ExtraName`] value used in `extra` markers. +/// The [`ExtraName`] value used in `extra` and `extras` markers. #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] pub enum MarkerValueExtra { /// A valid [`ExtraName`]. @@ -469,6 +484,24 @@ impl Display for MarkerValueExtra { } } +/// The [`GroupName`] value used in `dependency_group` markers. +#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub enum MarkerValueDependencyGroup { + /// A valid [`GroupName`]. + Group(GroupName), + /// An invalid name, preserved as an arbitrary string. + Arbitrary(String), +} + +impl Display for MarkerValueDependencyGroup { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + Self::Group(group) => group.fmt(f), + Self::Arbitrary(string) => string.fmt(f), + } + } +} + /// Represents one clause such as `python_version > "3.8"`. 
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] #[allow(missing_docs)] @@ -504,8 +537,18 @@ pub enum MarkerExpression { }, /// `extra '...'` or `'...' extra`. Extra { - operator: ExtraOperator, name: MarkerValueExtra, + operator: ExtraOperator, + }, + /// `'...' in extras` + Extras { + name: MarkerValueExtra, + operator: ContainerOperator, + }, + /// `'...' in dependency_groups` + DependencyGroups { + name: MarkerValueDependencyGroup, + operator: ContainerOperator, }, } @@ -520,6 +563,10 @@ pub(crate) enum MarkerExpressionKind { String(MarkerValueString), /// An extra expression, e.g. `extra == '...'`. Extra, + /// An extras expression, e.g. `'...' in extras`. + Extras, + /// A dependency groups expression, e.g. `'...' in dependency_groups`. + DependencyGroups, } /// The operator for an extra expression, either '==' or '!='. @@ -561,6 +608,45 @@ impl Display for ExtraOperator { } } +/// The operator for a container expression, either 'in' or 'not in'. +#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub enum ContainerOperator { + /// `in` + In, + /// `not in` + NotIn, +} + +impl ContainerOperator { + /// Creates a [`ContainerOperator`] from an equivalent [`MarkerOperator`]. + /// + /// Returns `None` if the operator is not supported for containers. + pub(crate) fn from_marker_operator(operator: MarkerOperator) -> Option { + match operator { + MarkerOperator::In => Some(ContainerOperator::In), + MarkerOperator::NotIn => Some(ContainerOperator::NotIn), + _ => None, + } + } + + /// Negates this operator. 
+ pub(crate) fn negate(&self) -> ContainerOperator { + match *self { + ContainerOperator::In => ContainerOperator::NotIn, + ContainerOperator::NotIn => ContainerOperator::In, + } + } +} + +impl Display for ContainerOperator { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Self::In => "in", + Self::NotIn => "not in", + }) + } +} + impl MarkerExpression { /// Parse a [`MarkerExpression`] from a string with the given reporter. pub fn parse_reporter( @@ -600,6 +686,8 @@ impl MarkerExpression { MarkerExpression::VersionIn { key, .. } => MarkerExpressionKind::VersionIn(*key), MarkerExpression::String { key, .. } => MarkerExpressionKind::String(*key), MarkerExpression::Extra { .. } => MarkerExpressionKind::Extra, + MarkerExpression::Extras { .. } => MarkerExpressionKind::Extras, + MarkerExpression::DependencyGroups { .. } => MarkerExpressionKind::DependencyGroups, } } } @@ -641,6 +729,12 @@ impl Display for MarkerExpression { MarkerExpression::Extra { operator, name } => { write!(f, "extra {operator} '{name}'") } + MarkerExpression::Extras { operator, name } => { + write!(f, "'{name}' {operator} extras") + } + MarkerExpression::DependencyGroups { operator, name } => { + write!(f, "'{name}' {operator} dependency_groups") + } } } } @@ -862,6 +956,26 @@ impl MarkerTree { low: low.negate(self.0), }) } + Variable::Extras(name) => { + let Edges::Boolean { low, high } = node.children else { + unreachable!() + }; + MarkerTreeKind::Extras(ExtrasMarkerTree { + name, + high: high.negate(self.0), + low: low.negate(self.0), + }) + } + Variable::DependencyGroups(name) => { + let Edges::Boolean { low, high } = node.children else { + unreachable!() + }; + MarkerTreeKind::DependencyGroups(DependencyGroupsMarkerTree { + name, + high: high.negate(self.0), + low: low.negate(self.0), + }) + } } } @@ -962,6 +1076,10 @@ impl MarkerTree { .edge(extras.contains(marker.name().extra())) .evaluate_reporter_impl(env, extras, reporter); } + // TODO(charlie): Add 
support for evaluating container extras in PEP 751 lockfiles. + MarkerTreeKind::Extras(..) | MarkerTreeKind::DependencyGroups(..) => { + return false; + } } false @@ -989,6 +1107,12 @@ impl MarkerTree { MarkerTreeKind::Extra(marker) => marker .edge(extras.contains(marker.name().extra())) .evaluate_extras(extras), + MarkerTreeKind::Extras(marker) => marker + .children() + .any(|(_, tree)| tree.evaluate_extras(extras)), + MarkerTreeKind::DependencyGroups(marker) => marker + .children() + .any(|(_, tree)| tree.evaluate_extras(extras)), } } @@ -1226,6 +1350,16 @@ impl MarkerTree { imp(tree, f); } } + MarkerTreeKind::Extras(kind) => { + for (_, tree) in kind.children() { + imp(tree, f); + } + } + MarkerTreeKind::DependencyGroups(kind) => { + for (_, tree) in kind.children() { + imp(tree, f); + } + } } } imp(self, &mut f); @@ -1348,6 +1482,36 @@ impl MarkerTree { write!(f, "extra != {} -> ", kind.name())?; kind.edge(false).fmt_graph(f, level + 1)?; } + MarkerTreeKind::Extras(kind) => { + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} in extras -> ", kind.name())?; + kind.edge(true).fmt_graph(f, level + 1)?; + + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} not in extras -> ", kind.name())?; + kind.edge(false).fmt_graph(f, level + 1)?; + } + MarkerTreeKind::DependencyGroups(kind) => { + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} in dependency_groups -> ", kind.name())?; + kind.edge(true).fmt_graph(f, level + 1)?; + + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} not in dependency_groups -> ", kind.name())?; + kind.edge(false).fmt_graph(f, level + 1)?; + } } Ok(()) @@ -1417,8 +1581,12 @@ pub enum MarkerTreeKind<'a> { In(InMarkerTree<'a>), /// A string expression with the `contains` operator. Contains(ContainsMarkerTree<'a>), - /// A string expression. + /// A string expression (e.g., `extra == 'dev'`). 
Extra(ExtraMarkerTree<'a>), + /// A string expression (e.g., `'dev' in extras`). + Extras(ExtrasMarkerTree<'a>), + /// A string expression (e.g., `'dev' in dependency_groups`). + DependencyGroups(DependencyGroupsMarkerTree<'a>), } /// A version marker node, such as `python_version < '3.7'`. @@ -1636,6 +1804,93 @@ impl Ord for ExtraMarkerTree<'_> { } } +/// A node representing the existence or absence of a given extra, such as `'bar' in extras`. +#[derive(PartialEq, Eq, Clone, Debug)] +pub struct ExtrasMarkerTree<'a> { + name: &'a CanonicalMarkerValueExtra, + high: NodeId, + low: NodeId, +} + +impl ExtrasMarkerTree<'_> { + /// Returns the name of the extra in this expression. + pub fn name(&self) -> &CanonicalMarkerValueExtra { + self.name + } + + /// The edges of this node, corresponding to the boolean evaluation of the expression. + pub fn children(&self) -> impl Iterator { + [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter() + } + + /// Returns the subtree associated with the given edge value. + pub fn edge(&self, value: bool) -> MarkerTree { + if value { + MarkerTree(self.high) + } else { + MarkerTree(self.low) + } + } +} + +impl PartialOrd for ExtrasMarkerTree<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for ExtrasMarkerTree<'_> { + fn cmp(&self, other: &Self) -> Ordering { + self.name() + .cmp(other.name()) + .then_with(|| self.children().cmp(other.children())) + } +} + +/// A node representing the existence or absence of a given dependency group, such as +/// `'bar' in dependency_groups`. +#[derive(PartialEq, Eq, Clone, Debug)] +pub struct DependencyGroupsMarkerTree<'a> { + name: &'a CanonicalMarkerValueDependencyGroup, + high: NodeId, + low: NodeId, +} + +impl DependencyGroupsMarkerTree<'_> { + /// Returns the name of the group in this expression. 
+ pub fn name(&self) -> &CanonicalMarkerValueDependencyGroup { + self.name + } + + /// The edges of this node, corresponding to the boolean evaluation of the expression. + pub fn children(&self) -> impl Iterator { + [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter() + } + + /// Returns the subtree associated with the given edge value. + pub fn edge(&self, value: bool) -> MarkerTree { + if value { + MarkerTree(self.high) + } else { + MarkerTree(self.low) + } + } +} + +impl PartialOrd for DependencyGroupsMarkerTree<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for DependencyGroupsMarkerTree<'_> { + fn cmp(&self, other: &Self) -> Ordering { + self.name() + .cmp(other.name()) + .then_with(|| self.children().cmp(other.children())) + } +} + /// A marker tree that contains at least one expression. /// /// See [`MarkerTree::contents`] for details. diff --git a/crates/uv-resolver/src/marker.rs b/crates/uv-resolver/src/marker.rs index b63d51401..5a2203f9b 100644 --- a/crates/uv-resolver/src/marker.rs +++ b/crates/uv-resolver/src/marker.rs @@ -54,6 +54,16 @@ pub(crate) fn requires_python(tree: MarkerTree) -> Option { collect_python_markers(tree, markers, range); } } + MarkerTreeKind::Extras(marker) => { + for (_, tree) in marker.children() { + collect_python_markers(tree, markers, range); + } + } + MarkerTreeKind::DependencyGroups(marker) => { + for (_, tree) in marker.children() { + collect_python_markers(tree, markers, range); + } + } } } diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs index dd2b3388f..2afbf2c6b 100644 --- a/crates/uv-resolver/src/resolution/output.rs +++ b/crates/uv-resolver/src/resolution/output.rs @@ -698,6 +698,16 @@ impl ResolverOutput { add_marker_params_from_tree(tree, set); } } + MarkerTreeKind::Extras(marker) => { + for (_, tree) in marker.children() { + add_marker_params_from_tree(tree, set); + } + } + 
MarkerTreeKind::DependencyGroups(marker) => { + for (_, tree) in marker.children() { + add_marker_params_from_tree(tree, set); + } + } } } From bd4c7ff860a844a907fbe1ce9723c6ae04a4ede9 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 14:13:27 -0400 Subject: [PATCH 095/130] Move dependency group normalization into specification (#14757) ## Summary A refactor that I'm extracting from #14755. There should be no functional changes, but the core idea is to postpone filling in the default `path` for a dependency group until we make the specification. This allows us to use the groups for the `pylock.toml` in the future, if such a `pylock.toml` is provided. --- crates/uv-normalize/src/group_name.rs | 13 +------ crates/uv-requirements/src/specification.rs | 36 +++++++++++++++--- crates/uv/src/commands/pip/compile.rs | 13 ++++--- crates/uv/src/commands/pip/install.rs | 10 ++--- crates/uv/src/commands/pip/operations.rs | 20 ++++------ crates/uv/src/commands/pip/sync.rs | 4 +- crates/uv/src/commands/project/add.rs | 2 +- crates/uv/src/commands/tool/install.rs | 3 +- crates/uv/src/commands/tool/run.rs | 3 +- crates/uv/src/lib.rs | 41 +++++---------------- 10 files changed, 65 insertions(+), 80 deletions(-) diff --git a/crates/uv-normalize/src/group_name.rs b/crates/uv-normalize/src/group_name.rs index 6b9ab14bd..e0a2b7c1f 100644 --- a/crates/uv-normalize/src/group_name.rs +++ b/crates/uv-normalize/src/group_name.rs @@ -1,5 +1,5 @@ use std::fmt::{Display, Formatter}; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::str::FromStr; use std::sync::LazyLock; @@ -98,17 +98,6 @@ pub struct PipGroupName { pub name: GroupName, } -impl PipGroupName { - /// Gets the path to use, applying the default if it's missing - pub fn path(&self) -> &Path { - if let Some(path) = &self.path { - path - } else { - Path::new("pyproject.toml") - } - } -} - impl FromStr for PipGroupName { type Err = InvalidPipGroupError; diff --git 
a/crates/uv-requirements/src/specification.rs b/crates/uv-requirements/src/specification.rs index 4c5741392..deead2c82 100644 --- a/crates/uv-requirements/src/specification.rs +++ b/crates/uv-requirements/src/specification.rs @@ -43,7 +43,7 @@ use uv_distribution_types::{ UnresolvedRequirementSpecification, }; use uv_fs::{CWD, Simplified}; -use uv_normalize::{ExtraName, GroupName, PackageName}; +use uv_normalize::{ExtraName, PackageName, PipGroupName}; use uv_requirements_txt::{RequirementsTxt, RequirementsTxtRequirement}; use uv_warnings::warn_user; use uv_workspace::pyproject::PyProjectToml; @@ -215,7 +215,7 @@ impl RequirementsSpecification { requirements: &[RequirementsSource], constraints: &[RequirementsSource], overrides: &[RequirementsSource], - groups: BTreeMap>, + groups: Option<&GroupsSpecification>, client_builder: &BaseClientBuilder<'_>, ) -> Result { let mut spec = Self::default(); @@ -272,7 +272,7 @@ impl RequirementsSpecification { "Cannot specify constraints with a `pylock.toml` file" )); } - if !groups.is_empty() { + if groups.is_some_and(|groups| !groups.groups.is_empty()) { return Err(anyhow::anyhow!( "Cannot specify groups with a `pylock.toml` file" )); @@ -287,9 +287,24 @@ impl RequirementsSpecification { } // pip `--group` flags specify their own sources, which we need to process here - if !groups.is_empty() { + if let Some(groups) = groups { + // First, we collect all groups by their path. 
+ let mut groups_by_path = BTreeMap::new(); + for group in &groups.groups { + // If there's no path provided, expect a pyproject.toml in the project-dir + // (Which is typically the current working directory, matching pip's behaviour) + let pyproject_path = group + .path + .clone() + .unwrap_or_else(|| groups.root.join("pyproject.toml")); + groups_by_path + .entry(pyproject_path) + .or_insert_with(Vec::new) + .push(group.name.clone()); + } + let mut group_specs = BTreeMap::new(); - for (path, groups) in groups { + for (path, groups) in groups_by_path { let group_spec = DependencyGroups::from_args( false, false, @@ -426,7 +441,7 @@ impl RequirementsSpecification { requirements: &[RequirementsSource], client_builder: &BaseClientBuilder<'_>, ) -> Result { - Self::from_sources(requirements, &[], &[], BTreeMap::default(), client_builder).await + Self::from_sources(requirements, &[], &[], None, client_builder).await } /// Initialize a [`RequirementsSpecification`] from a list of [`Requirement`]. @@ -485,3 +500,12 @@ impl RequirementsSpecification { self.requirements.is_empty() && self.source_trees.is_empty() && self.overrides.is_empty() } } + +#[derive(Debug, Default, Clone)] +pub struct GroupsSpecification { + /// The path to the project root, relative to which the default `pyproject.toml` file is + /// located. + pub root: PathBuf, + /// The enabled groups. 
+ pub groups: Vec, +} diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index a5116327b..b9dda45c8 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -1,7 +1,7 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeSet; use std::env; use std::ffi::OsStr; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::str::FromStr; use anyhow::{Result, anyhow}; @@ -26,7 +26,7 @@ use uv_distribution_types::{ use uv_fs::{CWD, Simplified}; use uv_git::ResolvedRepositoryReference; use uv_install_wheel::LinkMode; -use uv_normalize::{GroupName, PackageName}; +use uv_normalize::PackageName; use uv_pypi_types::{Conflicts, SupportedEnvironments}; use uv_python::{ EnvironmentPreference, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, @@ -34,7 +34,8 @@ use uv_python::{ }; use uv_requirements::upgrade::{LockedRequirements, read_pylock_toml_requirements}; use uv_requirements::{ - RequirementsSource, RequirementsSpecification, is_pylock_toml, upgrade::read_requirements_txt, + GroupsSpecification, RequirementsSource, RequirementsSpecification, is_pylock_toml, + upgrade::read_requirements_txt, }; use uv_resolver::{ AnnotationStyle, DependencyMode, DisplayResolutionGraph, ExcludeNewer, FlatIndex, ForkStrategy, @@ -64,7 +65,7 @@ pub(crate) async fn pip_compile( build_constraints_from_workspace: Vec, environments: SupportedEnvironments, extras: ExtrasSpecification, - groups: BTreeMap>, + groups: GroupsSpecification, output_file: Option<&Path>, format: Option, resolution_mode: ResolutionMode, @@ -207,7 +208,7 @@ pub(crate) async fn pip_compile( requirements, constraints, overrides, - groups, + Some(&groups), &client_builder, ) .await?; diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 79e18bd98..b9edad20e 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -1,6 
+1,5 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeSet; use std::fmt::Write; -use std::path::PathBuf; use anyhow::Context; use itertools::Itertools; @@ -23,14 +22,13 @@ use uv_distribution_types::{ use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::{SatisfiesResult, SitePackages}; -use uv_normalize::GroupName; use uv_pep508::PackageName; use uv_pypi_types::Conflicts; use uv_python::{ EnvironmentPreference, Prefix, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, PythonVersion, Target, }; -use uv_requirements::{RequirementsSource, RequirementsSpecification}; +use uv_requirements::{GroupsSpecification, RequirementsSource, RequirementsSpecification}; use uv_resolver::{ DependencyMode, ExcludeNewer, FlatIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, ResolutionMode, ResolverEnvironment, @@ -59,7 +57,7 @@ pub(crate) async fn pip_install( overrides_from_workspace: Vec, build_constraints_from_workspace: Vec, extras: &ExtrasSpecification, - groups: BTreeMap>, + groups: &GroupsSpecification, resolution_mode: ResolutionMode, prerelease_mode: PrereleaseMode, dependency_mode: DependencyMode, @@ -128,7 +126,7 @@ pub(crate) async fn pip_install( constraints, overrides, extras, - groups, + Some(groups), &client_builder, ) .await?; diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 117321c14..809f8bfdc 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -27,14 +27,14 @@ use uv_distribution_types::{ use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::{Plan, Planner, Preparer, SitePackages}; -use uv_normalize::{GroupName, PackageName}; +use uv_normalize::PackageName; use uv_pep508::{MarkerEnvironment, RequirementOrigin}; use uv_platform_tags::Tags; use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment}; use uv_python::{PythonEnvironment, PythonInstallation}; use 
uv_requirements::{ - LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification, - SourceTreeResolver, + GroupsSpecification, LookaheadResolver, NamedRequirementsResolver, RequirementsSource, + RequirementsSpecification, SourceTreeResolver, }; use uv_resolver::{ DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference, @@ -55,7 +55,7 @@ pub(crate) async fn read_requirements( constraints: &[RequirementsSource], overrides: &[RequirementsSource], extras: &ExtrasSpecification, - groups: BTreeMap>, + groups: Option<&GroupsSpecification>, client_builder: &BaseClientBuilder<'_>, ) -> Result { // If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`), @@ -91,15 +91,11 @@ pub(crate) async fn read_constraints( constraints: &[RequirementsSource], client_builder: &BaseClientBuilder<'_>, ) -> Result, Error> { - Ok(RequirementsSpecification::from_sources( - &[], - constraints, - &[], - BTreeMap::default(), - client_builder, + Ok( + RequirementsSpecification::from_sources(&[], constraints, &[], None, client_builder) + .await? + .constraints, ) - .await? - .constraints) } /// Resolve a set of requirements, similar to running `pip compile`. diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 61999825e..47d180a74 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeSet; use std::fmt::Write; use anyhow::{Context, Result}; @@ -92,7 +92,7 @@ pub(crate) async fn pip_sync( // Initialize a few defaults. 
let overrides = &[]; let extras = ExtrasSpecification::default(); - let groups = BTreeMap::default(); + let groups = None; let upgrade = Upgrade::default(); let resolution_mode = ResolutionMode::default(); let prerelease_mode = PrereleaseMode::default(); diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index 12535f859..4bf5905d2 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -345,7 +345,7 @@ pub(crate) async fn add( &requirements, &constraints, &[], - BTreeMap::default(), + None, &client_builder, ) .await?; diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs index 27f18abe4..12de5fd1f 100644 --- a/crates/uv/src/commands/tool/install.rs +++ b/crates/uv/src/commands/tool/install.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeMap; use std::fmt::Write; use std::str::FromStr; @@ -261,7 +260,7 @@ pub(crate) async fn install( with, constraints, overrides, - BTreeMap::default(), + None, &client_builder, ) .await?; diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index a1faa1153..7c91b9fe9 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeMap; use std::fmt::Display; use std::fmt::Write; use std::path::Path; @@ -871,7 +870,7 @@ async fn get_or_create_environment( with, constraints, overrides, - BTreeMap::default(), + None, &client_builder, ) .await?; diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 0f6c9465f..6ca03a470 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -1,5 +1,4 @@ use std::borrow::Cow; -use std::collections::BTreeMap; use std::ffi::OsString; use std::fmt::Write; use std::io::stdout; @@ -36,7 +35,7 @@ use uv_pep440::release_specifiers_to_ranges; use uv_pep508::VersionOrUrl; use uv_pypi_types::{ParsedDirectoryUrl, ParsedUrl}; use uv_python::PythonRequest; -use 
uv_requirements::RequirementsSource; +use uv_requirements::{GroupsSpecification, RequirementsSource}; use uv_requirements_txt::RequirementsTxtRequirement; use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script}; use uv_settings::{Combine, EnvironmentOptions, FilesystemOptions, Options}; @@ -472,20 +471,10 @@ async fn run(mut cli: Cli) -> Result { .into_iter() .map(RequirementsSource::from_constraints_txt) .collect::, _>>()?; - - let mut groups = BTreeMap::new(); - for group in args.settings.groups { - // If there's no path provided, expect a pyproject.toml in the project-dir - // (Which is typically the current working directory, matching pip's behaviour) - let pyproject_path = group - .path - .clone() - .unwrap_or_else(|| project_dir.join("pyproject.toml")); - groups - .entry(pyproject_path) - .or_insert_with(Vec::new) - .push(group.name.clone()); - } + let groups = GroupsSpecification { + root: project_dir.to_path_buf(), + groups: args.settings.groups, + }; commands::pip_compile( &requirements, @@ -657,20 +646,10 @@ async fn run(mut cli: Cli) -> Result { .into_iter() .map(RequirementsSource::from_overrides_txt) .collect::, _>>()?; - - let mut groups = BTreeMap::new(); - for group in args.settings.groups { - // If there's no path provided, expect a pyproject.toml in the project-dir - // (Which is typically the current working directory, matching pip's behaviour) - let pyproject_path = group - .path - .clone() - .unwrap_or_else(|| project_dir.join("pyproject.toml")); - groups - .entry(pyproject_path) - .or_insert_with(Vec::new) - .push(group.name.clone()); - } + let groups = GroupsSpecification { + root: project_dir.to_path_buf(), + groups: args.settings.groups, + }; // Special-case: any source trees specified on the command-line are automatically // reinstalled. 
This matches user expectations: `uv pip install .` should always @@ -730,7 +709,7 @@ async fn run(mut cli: Cli) -> Result { args.overrides_from_workspace, args.build_constraints_from_workspace, &args.settings.extras, - groups, + &groups, args.settings.resolution, args.settings.prerelease, args.settings.dependency_mode, From d85a300b5f456a26dc1ac780e7ed8da9d39b7566 Mon Sep 17 00:00:00 2001 From: Matt Norton Date: Sun, 20 Jul 2025 22:27:33 +0100 Subject: [PATCH 096/130] Fix typo in `concepts/projects/config.md` (#14759) --- docs/concepts/projects/config.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md index 34b62c01a..b2eafd36a 100644 --- a/docs/concepts/projects/config.md +++ b/docs/concepts/projects/config.md @@ -367,9 +367,9 @@ in the deployed environment without a dependency on the originating source code. ## Conflicting dependencies -uv requires resolves all project dependencies together, including optional dependencies ("extras") -and dependency groups. If dependencies declared in one section are not compatible with those in -another section, uv will fail to resolve the requirements of the project with an error. +uv resolves all project dependencies together, including optional dependencies ("extras") and +dependency groups. If dependencies declared in one section are not compatible with those in another +section, uv will fail to resolve the requirements of the project with an error. uv supports explicit declaration of conflicting dependency groups. 
For example, to declare that the `optional-dependency` groups `extra1` and `extra2` are incompatible: From fcf0bdd3a6675df6be5b5e4c8420f3cdea4d0ce6 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 17:38:24 -0400 Subject: [PATCH 097/130] Add missing `the` in concept link (#14763) --- docs/getting-started/features.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/features.md b/docs/getting-started/features.md index ed34dd8b2..c78f5f560 100644 --- a/docs/getting-started/features.md +++ b/docs/getting-started/features.md @@ -104,6 +104,6 @@ self-update: ## Next steps -Read the [guides](../guides/index.md) for an introduction to each feature, check out +Read the [guides](../guides/index.md) for an introduction to each feature, check out the [concept](../concepts/index.md) pages for in-depth details about uv's features, or learn how to [get help](./help.md) if you run into any problems. From 9923f42c2eb03debc9bf7461b61ea64ae5b081a1 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 17:38:50 -0400 Subject: [PATCH 098/130] Fix kebab casing of README variants in build backend (#14762) ## Summary In this context, `rename_all` only applies to the variants, not their fields. Closes #14761. --- crates/uv-build-backend/src/metadata.rs | 63 ++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 2 deletions(-) diff --git a/crates/uv-build-backend/src/metadata.rs b/crates/uv-build-backend/src/metadata.rs index 296c76a2b..5997f72b6 100644 --- a/crates/uv-build-backend/src/metadata.rs +++ b/crates/uv-build-backend/src/metadata.rs @@ -703,7 +703,7 @@ struct Project { /// The optional `project.readme` key in a pyproject.toml as specified in /// . #[derive(Deserialize, Debug, Clone)] -#[serde(untagged, rename_all = "kebab-case")] +#[serde(untagged, rename_all_fields = "kebab-case")] pub(crate) enum Readme { /// Relative path to the README. 
String(PathBuf), @@ -713,7 +713,7 @@ pub(crate) enum Readme { content_type: String, charset: Option, }, - /// The full description of the project as inline value. + /// The full description of the project as an inline value. Text { text: String, content_type: String, @@ -965,6 +965,65 @@ mod tests { "###); } + #[test] + fn readme() { + let temp_dir = TempDir::new().unwrap(); + + fs_err::write( + temp_dir.path().join("Readme.md"), + indoc! {r" + # Foo + + This is the foo library. + "}, + ) + .unwrap(); + + fs_err::write( + temp_dir.path().join("License.txt"), + indoc! {r#" + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF + CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE + OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + "#}, + ) + .unwrap(); + + let contents = indoc! {r#" + # See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example + + [project] + name = "hello-world" + version = "0.1.0" + description = "A Python package" + readme = { file = "Readme.md", content-type = "text/markdown" } + requires_python = ">=3.12" + + [build-system] + requires = ["uv_build>=0.4.15,<0.5"] + build-backend = "uv_build" + "# + }; + + let pyproject_toml = PyProjectToml::parse(contents).unwrap(); + let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap(); + + assert_snapshot!(metadata.core_metadata_format(), @r" + Metadata-Version: 2.3 + Name: hello-world + Version: 0.1.0 + Summary: A Python package + Description-Content-Type: text/markdown + + # Foo + + This is the foo library. 
+ "); + } + #[test] fn self_extras() { let temp_dir = TempDir::new().unwrap(); From 5e2047b253718bb58d7d373f38e6176d7f154afa Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 18:17:07 -0400 Subject: [PATCH 099/130] Implement `PartialEq` for `OptionSet` (#14765) Closes https://github.com/astral-sh/uv/issues/14737. --- crates/uv-options-metadata/src/lib.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/uv-options-metadata/src/lib.rs b/crates/uv-options-metadata/src/lib.rs index 6e966cfc4..4c0a5c322 100644 --- a/crates/uv-options-metadata/src/lib.rs +++ b/crates/uv-options-metadata/src/lib.rs @@ -69,12 +69,20 @@ impl Display for OptionEntry { /// /// It extracts the options by calling the [`OptionsMetadata::record`] of a type implementing /// [`OptionsMetadata`]. -#[derive(Copy, Clone, Eq, PartialEq)] +#[derive(Copy, Clone)] pub struct OptionSet { record: fn(&mut dyn Visit), doc: fn() -> Option<&'static str>, } +impl PartialEq for OptionSet { + fn eq(&self, other: &Self) -> bool { + std::ptr::fn_addr_eq(self.record, other.record) && std::ptr::fn_addr_eq(self.doc, other.doc) + } +} + +impl Eq for OptionSet {} + impl OptionSet { pub fn of() -> Self where From dbe6a214862d4bd86ff5eecd91921ee77848e610 Mon Sep 17 00:00:00 2001 From: konsti Date: Mon, 21 Jul 2025 00:28:34 +0200 Subject: [PATCH 100/130] Retry request on invalid data error (#14703) I also improved the trace logging. Fixes #14699 --- crates/uv-client/src/base_client.rs | 30 ++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs index 9ddc30e75..d901f57e7 100644 --- a/crates/uv-client/src/base_client.rs +++ b/crates/uv-client/src/base_client.rs @@ -920,18 +920,34 @@ pub fn is_extended_transient_error(err: &dyn Error) -> bool { } // IO Errors may be nested through custom IO errors. 
+ let mut has_io_error = false; for io_err in find_sources::(&err) { - if io_err.kind() == io::ErrorKind::ConnectionReset - || io_err.kind() == io::ErrorKind::UnexpectedEof - || io_err.kind() == io::ErrorKind::BrokenPipe - { - trace!("Retrying error: `ConnectionReset` or `UnexpectedEof`"); + has_io_error = true; + let retryable_io_err_kinds = [ + // https://github.com/astral-sh/uv/issues/12054 + io::ErrorKind::BrokenPipe, + // From reqwest-middleware + io::ErrorKind::ConnectionAborted, + // https://github.com/astral-sh/uv/issues/3514 + io::ErrorKind::ConnectionReset, + // https://github.com/astral-sh/uv/issues/14699 + io::ErrorKind::InvalidData, + // https://github.com/astral-sh/uv/issues/9246 + io::ErrorKind::UnexpectedEof, + ]; + if retryable_io_err_kinds.contains(&io_err.kind()) { + trace!("Retrying error: `{}`", io_err.kind()); return true; } - trace!("Cannot retry IO error: not one of `ConnectionReset` or `UnexpectedEof`"); + trace!( + "Cannot retry IO error `{}`, not a retryable IO error kind", + io_err.kind() + ); } - trace!("Cannot retry error: not an IO error"); + if !has_io_error { + trace!("Cannot retry error: not an extended IO error"); + } false } From a42a2846e662c68785ccc29e583e1bf9227bec85 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Sun, 20 Jul 2025 18:54:50 -0400 Subject: [PATCH 101/130] Make warnings about masked `[tool.uv]` fields more precise (#14325) This is the second half of #14308 --- crates/uv-settings/src/lib.rs | 254 +++++++++++++++++++++++++++- crates/uv/tests/it/show_settings.rs | 233 ++++++++++++++++++++++++- 2 files changed, 482 insertions(+), 5 deletions(-) diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs index 84aef8f28..4dd4c392f 100644 --- a/crates/uv-settings/src/lib.rs +++ b/crates/uv-settings/src/lib.rs @@ -120,10 +120,9 @@ impl FilesystemOptions { .ok() .and_then(|content| toml::from_str::(&content).ok()) { - if pyproject.tool.is_some_and(|tool| tool.uv.is_some()) { - warn_user!( - "Found both a 
`uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The `[tool.uv]` section will be ignored in favor of the `uv.toml` file." - ); + if let Some(options) = pyproject.tool.as_ref().and_then(|tool| tool.uv.as_ref()) + { + warn_uv_toml_masked_fields(options); } } @@ -269,6 +268,253 @@ fn validate_uv_toml(path: &Path, options: &Options) -> Result<(), Error> { Ok(()) } +/// Validate that an [`Options`] contains no fields that `uv.toml` would mask +/// +/// This is essentially the inverse of [`validated_uv_toml`][]. +fn warn_uv_toml_masked_fields(options: &Options) { + let Options { + globals: + GlobalOptions { + required_version, + native_tls, + offline, + no_cache, + cache_dir, + preview, + python_preference, + python_downloads, + concurrent_downloads, + concurrent_builds, + concurrent_installs, + allow_insecure_host, + }, + top_level: + ResolverInstallerOptions { + index, + index_url, + extra_index_url, + no_index, + find_links, + index_strategy, + keyring_provider, + resolution, + prerelease, + fork_strategy, + dependency_metadata, + config_settings, + no_build_isolation, + no_build_isolation_package, + exclude_newer, + link_mode, + compile_bytecode, + no_sources, + upgrade, + upgrade_package, + reinstall, + reinstall_package, + no_build, + no_build_package, + no_binary, + no_binary_package, + }, + install_mirrors: + PythonInstallMirrors { + python_install_mirror, + pypy_install_mirror, + python_downloads_json_url, + }, + publish: + PublishOptions { + publish_url, + trusted_publishing, + check_url, + }, + add: AddOptions { add_bounds }, + pip, + cache_keys, + override_dependencies, + constraint_dependencies, + build_constraint_dependencies, + environments, + required_environments, + conflicts: _, + workspace: _, + sources: _, + dev_dependencies: _, + default_groups: _, + dependency_groups: _, + managed: _, + package: _, + build_backend: _, + } = options; + + let mut masked_fields = vec![]; + + if required_version.is_some() { + 
masked_fields.push("required-version"); + } + if native_tls.is_some() { + masked_fields.push("native-tls"); + } + if offline.is_some() { + masked_fields.push("offline"); + } + if no_cache.is_some() { + masked_fields.push("no-cache"); + } + if cache_dir.is_some() { + masked_fields.push("cache-dir"); + } + if preview.is_some() { + masked_fields.push("preview"); + } + if python_preference.is_some() { + masked_fields.push("python-preference"); + } + if python_downloads.is_some() { + masked_fields.push("python-downloads"); + } + if concurrent_downloads.is_some() { + masked_fields.push("concurrent-downloads"); + } + if concurrent_builds.is_some() { + masked_fields.push("concurrent-builds"); + } + if concurrent_installs.is_some() { + masked_fields.push("concurrent-installs"); + } + if allow_insecure_host.is_some() { + masked_fields.push("allow-insecure-host"); + } + if index.is_some() { + masked_fields.push("index"); + } + if index_url.is_some() { + masked_fields.push("index-url"); + } + if extra_index_url.is_some() { + masked_fields.push("extra-index-url"); + } + if no_index.is_some() { + masked_fields.push("no-index"); + } + if find_links.is_some() { + masked_fields.push("find-links"); + } + if index_strategy.is_some() { + masked_fields.push("index-strategy"); + } + if keyring_provider.is_some() { + masked_fields.push("keyring-provider"); + } + if resolution.is_some() { + masked_fields.push("resolution"); + } + if prerelease.is_some() { + masked_fields.push("prerelease"); + } + if fork_strategy.is_some() { + masked_fields.push("fork-strategy"); + } + if dependency_metadata.is_some() { + masked_fields.push("dependency-metadata"); + } + if config_settings.is_some() { + masked_fields.push("config-settings"); + } + if no_build_isolation.is_some() { + masked_fields.push("no-build-isolation"); + } + if no_build_isolation_package.is_some() { + masked_fields.push("no-build-isolation-package"); + } + if exclude_newer.is_some() { + masked_fields.push("exclude-newer"); + } + if 
link_mode.is_some() { + masked_fields.push("link-mode"); + } + if compile_bytecode.is_some() { + masked_fields.push("compile-bytecode"); + } + if no_sources.is_some() { + masked_fields.push("no-sources"); + } + if upgrade.is_some() { + masked_fields.push("upgrade"); + } + if upgrade_package.is_some() { + masked_fields.push("upgrade-package"); + } + if reinstall.is_some() { + masked_fields.push("reinstall"); + } + if reinstall_package.is_some() { + masked_fields.push("reinstall-package"); + } + if no_build.is_some() { + masked_fields.push("no-build"); + } + if no_build_package.is_some() { + masked_fields.push("no-build-package"); + } + if no_binary.is_some() { + masked_fields.push("no-binary"); + } + if no_binary_package.is_some() { + masked_fields.push("no-binary-package"); + } + if python_install_mirror.is_some() { + masked_fields.push("python-install-mirror"); + } + if pypy_install_mirror.is_some() { + masked_fields.push("pypy-install-mirror"); + } + if python_downloads_json_url.is_some() { + masked_fields.push("python-downloads-json-url"); + } + if publish_url.is_some() { + masked_fields.push("publish-url"); + } + if trusted_publishing.is_some() { + masked_fields.push("trusted-publishing"); + } + if check_url.is_some() { + masked_fields.push("check-url"); + } + if add_bounds.is_some() { + masked_fields.push("add-bounds"); + } + if pip.is_some() { + masked_fields.push("pip"); + } + if cache_keys.is_some() { + masked_fields.push("cache_keys"); + } + if override_dependencies.is_some() { + masked_fields.push("override-dependencies"); + } + if constraint_dependencies.is_some() { + masked_fields.push("constraint-dependencies"); + } + if build_constraint_dependencies.is_some() { + masked_fields.push("build-constraint-dependencies"); + } + if environments.is_some() { + masked_fields.push("environments"); + } + if required_environments.is_some() { + masked_fields.push("required-environments"); + } + if !masked_fields.is_empty() { + let field_listing = 
masked_fields.join("\n- "); + warn_user!( + "Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The following fields from `[tool.uv]` will be ignored in favor of the `uv.toml` file:\n- {}", + field_listing, + ); + } +} + #[derive(thiserror::Error, Debug)] pub enum Error { #[error(transparent)] diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index 500e78965..c88f8b739 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -3511,6 +3511,8 @@ fn resolve_poetry_toml() -> anyhow::Result<()> { } /// Read from both a `uv.toml` and `pyproject.toml` file in the current directory. +/// +/// Some fields in `[tool.uv]` are masked by `uv.toml` being defined, and should be warned about. #[test] #[cfg_attr( windows, @@ -3535,6 +3537,10 @@ fn resolve_both() -> anyhow::Result<()> { name = "example" version = "0.0.0" + [tool.uv] + offline = true + dev-dependencies = ["pytest"] + [tool.uv.pip] resolution = "highest" extra-index-url = ["https://test.pypi.org/simple"] @@ -3724,7 +3730,232 @@ fn resolve_both() -> anyhow::Result<()> { } ----- stderr ----- - warning: Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The `[tool.uv]` section will be ignored in favor of the `uv.toml` file. + warning: Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The following fields from `[tool.uv]` will be ignored in favor of the `uv.toml` file: + - offline + - pip + "# + ); + + Ok(()) +} + +/// Read from both a `uv.toml` and `pyproject.toml` file in the current directory. +/// +/// But the fields `[tool.uv]` defines aren't allowed in `uv.toml` so there's no warning. +#[test] +#[cfg_attr( + windows, + ignore = "Configuration tests are not yet supported on Windows" +)] +fn resolve_both_special_fields() -> anyhow::Result<()> { + let context = TestContext::new("3.12"); + + // Write a `uv.toml` file to the directory. 
+ let config = context.temp_dir.child("uv.toml"); + config.write_str(indoc::indoc! {r#" + [pip] + resolution = "lowest-direct" + generate-hashes = true + index-url = "https://pypi.org/simple" + "#})?; + + // Write a `pyproject.toml` file to the directory + let config = context.temp_dir.child("pyproject.toml"); + config.write_str(indoc::indoc! {r#" + [project] + name = "example" + version = "0.0.0" + + [dependency-groups] + mygroup = ["iniconfig"] + + [tool.uv] + dev-dependencies = ["pytest"] + + [tool.uv.dependency-groups] + mygroup = {requires-python = ">=3.12"} + "#})?; + + let requirements_in = context.temp_dir.child("requirements.in"); + requirements_in.write_str("anyio>3.0.0")?; + + // Resolution should succeed, but warn that the `pip` section in `pyproject.toml` is ignored. + uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path()) + .arg("--show-settings") + .arg("requirements.in"), @r#" + success: true + exit_code: 0 + ----- stdout ----- + GlobalSettings { + required_version: None, + quiet: 0, + verbose: 0, + color: Auto, + network_settings: NetworkSettings { + connectivity: Online, + native_tls: false, + allow_insecure_host: [], + }, + concurrency: Concurrency { + downloads: 50, + builds: 16, + installs: 8, + }, + show_settings: true, + preview: Disabled, + python_preference: Managed, + python_downloads: Automatic, + no_progress: false, + installer_metadata: true, + } + CacheSettings { + no_cache: false, + cache_dir: Some( + "[CACHE_DIR]/", + ), + } + PipCompileSettings { + format: None, + src_file: [ + "requirements.in", + ], + constraints: [], + overrides: [], + build_constraints: [], + constraints_from_workspace: [], + overrides_from_workspace: [], + build_constraints_from_workspace: [], + environments: SupportedEnvironments( + [], + ), + refresh: None( + Timestamp( + SystemTime { + tv_sec: [TIME], + tv_nsec: [TIME], + }, + ), + ), + settings: PipSettings { + index_locations: IndexLocations { + indexes: [ + Index { 
+ name: None, + url: Pypi( + VerbatimUrl { + url: DisplaySafeUrl { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "pypi.org", + ), + ), + port: None, + path: "/simple", + query: None, + fragment: None, + }, + given: Some( + "https://pypi.org/simple", + ), + }, + ), + explicit: false, + default: true, + origin: None, + format: Simple, + publish_url: None, + authenticate: Auto, + ignore_error_codes: None, + }, + ], + flat_index: [], + no_index: false, + }, + python: None, + install_mirrors: PythonInstallMirrors { + python_install_mirror: None, + pypy_install_mirror: None, + python_downloads_json_url: None, + }, + system: false, + extras: ExtrasSpecification( + ExtrasSpecificationInner { + include: Some( + [], + ), + exclude: [], + only_extras: false, + history: ExtrasSpecificationHistory { + extra: [], + only_extra: [], + no_extra: [], + all_extras: false, + no_default_extras: false, + defaults: List( + [], + ), + }, + }, + ), + groups: [], + break_system_packages: false, + target: None, + prefix: None, + index_strategy: FirstIndex, + keyring_provider: Disabled, + torch_backend: None, + no_build_isolation: false, + no_build_isolation_package: [], + build_options: BuildOptions { + no_binary: None, + no_build: None, + }, + allow_empty_requirements: false, + strict: false, + dependency_mode: Transitive, + resolution: LowestDirect, + prerelease: IfNecessaryOrExplicit, + fork_strategy: RequiresPython, + dependency_metadata: DependencyMetadata( + {}, + ), + output_file: None, + no_strip_extras: false, + no_strip_markers: false, + no_annotate: false, + no_header: false, + custom_compile_command: None, + generate_hashes: true, + config_setting: ConfigSettings( + {}, + ), + python_version: None, + python_platform: None, + universal: false, + exclude_newer: None, + no_emit_package: [], + emit_index_url: false, + emit_find_links: false, + emit_build_options: false, + emit_marker_expression: false, + 
emit_index_annotation: false, + annotation_style: Split, + link_mode: Clone, + compile_bytecode: false, + sources: Enabled, + hash_checking: Some( + Verify, + ), + upgrade: None, + reinstall: None, + }, + } + + ----- stderr ----- "# ); From 0487034e91bbbbbe88def7ebdaf58abf454f8cb8 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 20 Jul 2025 20:28:31 -0400 Subject: [PATCH 102/130] Fix bad merge in `warn_uv_toml_masked_fields` (#14767) ## Summary The branch got stale and merged without flagging that this no longer compiles. --- crates/uv-settings/src/lib.rs | 4 ++++ crates/uv/tests/it/show_settings.rs | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs index 4dd4c392f..64f160aa3 100644 --- a/crates/uv-settings/src/lib.rs +++ b/crates/uv-settings/src/lib.rs @@ -302,6 +302,7 @@ fn warn_uv_toml_masked_fields(options: &Options) { fork_strategy, dependency_metadata, config_settings, + config_settings_package, no_build_isolation, no_build_isolation_package, exclude_newer, @@ -422,6 +423,9 @@ fn warn_uv_toml_masked_fields(options: &Options) { if config_settings.is_some() { masked_fields.push("config-settings"); } + if config_settings_package.is_some() { + masked_fields.push("config-settings-package"); + } if no_build_isolation.is_some() { masked_fields.push("no-build-isolation"); } diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index c88f8b739..293b437d6 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -3870,6 +3870,7 @@ fn resolve_both_special_fields() -> anyhow::Result<()> { publish_url: None, authenticate: Auto, ignore_error_codes: None, + cache_control: None, }, ], flat_index: [], @@ -3933,6 +3934,9 @@ fn resolve_both_special_fields() -> anyhow::Result<()> { config_setting: ConfigSettings( {}, ), + config_settings_package: PackageConfigSettings( + {}, + ), python_version: None, python_platform: None, universal: 
false, From a4c7bcf3ca5698e87388030e4d32749ed89bde35 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:09 -0400 Subject: [PATCH 103/130] Update aws-actions/configure-aws-credentials digest to a159d7b (#14768) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4fb67346e..06d578d28 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1594,7 +1594,7 @@ jobs: run: chmod +x ./uv - name: "Configure AWS credentials" - uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d + uses: aws-actions/configure-aws-credentials@a159d7bb5354cf786f855f2f5d1d8d768d9a08d1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} From 0951ebc55ceec102341a6e0b038baaa2ecd5229e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:22 -0400 Subject: [PATCH 104/130] Update google-github-actions/auth digest to 140bb51 (#14769) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 06d578d28..2cd11706a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1613,7 +1613,7 @@ jobs: - name: "Authenticate with GCP" id: "auth" - uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8" + uses: "google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462" with: credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" From 2c54a4acfba3282b649d7bc7cb7b72ac8b19a297 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:28 -0400 Subject: [PATCH 105/130] Update google-github-actions/setup-gcloud digest to 6a7c903 (#14770) --- 
.github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2cd11706a..ff8212d91 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1618,7 +1618,7 @@ jobs: credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" - name: "Set up GCP SDK" - uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b" + uses: "google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9" - name: "Get GCP Artifact Registry token" id: get_token From 51336acd2a070528515dab9cd1c1823be6396a76 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:34 -0400 Subject: [PATCH 106/130] Update pre-commit hook astral-sh/ruff-pre-commit to v0.12.4 (#14771) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5476c9dc8..3a8e4a39a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,7 +42,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.12.3 + rev: v0.12.4 hooks: - id: ruff-format - id: ruff From 3a949e0e5312a6e616b94dc39289d8a745916fdc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:46 -0400 Subject: [PATCH 107/130] Update Rust crate rustix to v1.0.8 (#14772) --- Cargo.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77dfad413..e1bc5dbef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1150,7 +1150,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1989,7 +1989,7 @@ checksum = 
"e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi 0.4.0", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2049,7 +2049,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2902,7 +2902,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3033,7 +3033,7 @@ checksum = "78c81d000a2c524133cc00d2f92f019d399e57906c3b7119271a2495354fe895" dependencies = [ "cfg-if", "libc", - "rustix 1.0.7", + "rustix 1.0.8", "windows 0.61.1", ] @@ -3334,20 +3334,20 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "rustix" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags 2.9.1", "errno", "libc", "linux-raw-sys 0.9.2", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3929,8 +3929,8 @@ dependencies = [ "fastrand", "getrandom 0.3.1", "once_cell", - "rustix 1.0.7", - "windows-sys 0.52.0", + "rustix 1.0.8", + "windows-sys 0.59.0", ] [[package]] @@ -5289,7 +5289,7 @@ dependencies = [ "junction", "path-slash", "percent-encoding", - "rustix 1.0.7", + "rustix 1.0.8", "same-file", "schemars", "serde", @@ -6284,7 +6284,7 @@ checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d" dependencies = [ "env_home", "regex", - "rustix 1.0.7", + "rustix 1.0.8", "winsafe", ] From b6d12c1b84f666cacf18d1501b701f9ee63c410b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:24:56 -0400 Subject: [PATCH 108/130] Update Rust crate serde_json to v1.0.141 (#14773) --- 
Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1bc5dbef..7b73f29d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3584,9 +3584,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" dependencies = [ "itoa", "memchr", From abcd03bc0ede8bc19566b90d0c059709978e6748 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:25:01 -0400 Subject: [PATCH 109/130] Update astral-sh/setup-uv action to v6.4.1 (#14774) --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/publish-pypi.yml | 4 ++-- .github/workflows/sync-python-releases.yml | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ff8212d91..a8be5efe0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -82,7 +82,7 @@ jobs: run: rustup component add rustfmt - name: "Install uv" - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - name: "rustfmt" run: cargo fmt --all --check @@ -213,7 +213,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - name: "Install required Python versions" run: uv python install @@ -249,7 +249,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - name: "Install required Python 
versions" run: uv python install @@ -286,7 +286,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - name: "Install required Python versions" run: uv python install @@ -439,7 +439,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index e4435ff17..f6e4b1b4a 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -22,7 +22,7 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv-* @@ -43,7 +43,7 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv_build-* diff --git a/.github/workflows/sync-python-releases.yml b/.github/workflows/sync-python-releases.yml index 166458507..bbc9e7b07 100644 --- a/.github/workflows/sync-python-releases.yml +++ b/.github/workflows/sync-python-releases.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1 with: version: "latest" enable-cache: true From e0feed8f9e1a9735c2e74e8553692362077fa566 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:54:38 -0400 Subject: [PATCH 110/130] Update taiki-e/install-action action to v2.56.19 (#14777) --- .github/workflows/ci.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8be5efe0..0e4afd098 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -188,7 +188,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: "Install cargo shear" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-shear - run: cargo shear @@ -218,7 +218,7 @@ jobs: run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-nextest @@ -254,7 +254,7 @@ jobs: run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-nextest @@ -299,7 +299,7 @@ jobs: run: rustup show - name: "Install cargo nextest" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-nextest @@ -352,7 +352,7 @@ jobs: rustup component add 
rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc - name: "Install cargo-bloat" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-bloat @@ -2516,7 +2516,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-codspeed @@ -2553,7 +2553,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19 with: tool: cargo-codspeed From a049ba78fcf34b0c7314f111498f6125d2fd185c Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 20 Jul 2025 21:54:44 -0400 Subject: [PATCH 111/130] Update uraimo/run-on-arch-action action to v3 (#14778) --- .github/workflows/build-binaries.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-binaries.yml b/.github/workflows/build-binaries.yml index ccd3ef3ee..b8d245c1b 100644 --- a/.github/workflows/build-binaries.yml +++ b/.github/workflows/build-binaries.yml @@ -718,7 +718,7 @@ jobs: manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} args: --release --locked --out dist --features self-update - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel" with: arch: ${{ matrix.platform.arch }} @@ -767,7 +767,7 @@ jobs: manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml - - uses: 
uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel uv-build" with: arch: ${{ matrix.platform.arch }} From 7c2819d1f63b7a2c5b1aa3b9acee032b5bf1d725 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Mon, 21 Jul 2025 04:48:38 -0500 Subject: [PATCH 112/130] Match `--bounds` formatting for `uv_build` bounds in `uv init` (#14731) Closes #14724 https://chatgpt.com/codex/tasks/task_e_687a53ba646c8331baa4140c5b2bec70 --------- Co-authored-by: konstin --- crates/uv-build-backend/src/lib.rs | 28 ++++++++++----------- crates/uv-build-backend/src/metadata.rs | 12 ++++----- crates/uv/src/commands/project/init.rs | 9 ++++++- scripts/packages/built-by-uv/pyproject.toml | 2 +- 4 files changed, 29 insertions(+), 22 deletions(-) diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs index 5e0efd6d5..5800d04d2 100644 --- a/crates/uv-build-backend/src/lib.rs +++ b/crates/uv-build-backend/src/lib.rs @@ -680,7 +680,7 @@ mod tests { license = { file = "license.txt" } [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }, @@ -748,7 +748,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }, @@ -812,7 +812,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" [tool.uv.build-backend] @@ -854,7 +854,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" [tool.uv.build-backend] @@ -879,7 +879,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" 
[tool.uv.build-backend] @@ -928,7 +928,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" [tool.uv.build-backend] @@ -959,7 +959,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1010,7 +1010,7 @@ mod tests { version = "1.0.0" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" [tool.uv.build-backend] @@ -1036,7 +1036,7 @@ mod tests { module-name = "simple_namespace.part" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1104,7 +1104,7 @@ mod tests { namespace = true [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1127,7 +1127,7 @@ mod tests { namespace = true [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1188,7 +1188,7 @@ mod tests { namespace = true [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1211,7 +1211,7 @@ mod tests { module-name = "cloud-stubs.db.schema" [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; @@ -1261,7 +1261,7 @@ mod tests { module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"] [build-system] - requires = ["uv_build>=0.5.15,<0.6"] + requires = ["uv_build>=0.5.15,<0.6.0"] build-backend = "uv_build" "# }; diff --git a/crates/uv-build-backend/src/metadata.rs b/crates/uv-build-backend/src/metadata.rs index 5997f72b6..d224fd788 100644 --- a/crates/uv-build-backend/src/metadata.rs +++ b/crates/uv-build-backend/src/metadata.rs @@ 
-171,7 +171,7 @@ impl PyProjectToml { /// /// ```toml /// [build-system] - /// requires = ["uv_build>=0.4.15,<0.5"] + /// requires = ["uv_build>=0.4.15,<0.5.0"] /// build-backend = "uv_build" /// ``` pub fn check_build_system(&self, uv_version: &str) -> Vec { @@ -826,7 +826,7 @@ mod tests { {payload} [build-system] - requires = ["uv_build>=0.4.15,<0.5"] + requires = ["uv_build>=0.4.15,<0.5.0"] build-backend = "uv_build" "# } @@ -909,7 +909,7 @@ mod tests { foo-bar = "foo:bar" [build-system] - requires = ["uv_build>=0.4.15,<0.5"] + requires = ["uv_build>=0.4.15,<0.5.0"] build-backend = "uv_build" "# }; @@ -1095,7 +1095,7 @@ mod tests { foo-bar = "foo:bar" [build-system] - requires = ["uv_build>=0.4.15,<0.5"] + requires = ["uv_build>=0.4.15,<0.5.0"] build-backend = "uv_build" "# }; @@ -1194,7 +1194,7 @@ mod tests { version = "0.1.0" [build-system] - requires = ["uv_build>=0.4.15,<0.5", "wheel"] + requires = ["uv_build>=0.4.15,<0.5.0", "wheel"] build-backend = "uv_build" "#}; let pyproject_toml = PyProjectToml::parse(contents).unwrap(); @@ -1230,7 +1230,7 @@ mod tests { version = "0.1.0" [build-system] - requires = ["uv_build>=0.4.15,<0.5"] + requires = ["uv_build>=0.4.15,<0.5.0"] build-backend = "setuptools" "#}; let pyproject_toml = PyProjectToml::parse(contents).unwrap(); diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index 4fd79b1c2..9ba2a434d 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -1,6 +1,7 @@ use anyhow::{Context, Result, anyhow}; use owo_colors::OwoColorize; use std::fmt::Write; +use std::iter; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str::FromStr; @@ -944,7 +945,13 @@ fn pyproject_build_system(package: &PackageName, build_backend: ProjectBuildBack min_version.release()[0] == 0, "migrate to major version bumps" ); - let max_version = Version::new([0, min_version.release()[1] + 1]); + let max_version = Version::new( + [0, 
min_version.release()[1] + 1] + .into_iter() + // Add trailing zeroes to match the version length, to use the same style + // as `--bounds`. + .chain(iter::repeat_n(0, min_version.release().len() - 2)), + ); indoc::formatdoc! {r#" [build-system] requires = ["uv_build>={min_version},<{max_version}"] diff --git a/scripts/packages/built-by-uv/pyproject.toml b/scripts/packages/built-by-uv/pyproject.toml index b1914e071..b95f9862f 100644 --- a/scripts/packages/built-by-uv/pyproject.toml +++ b/scripts/packages/built-by-uv/pyproject.toml @@ -24,5 +24,5 @@ data = "assets" headers = "header" [build-system] -requires = ["uv_build>=0.8,<0.9"] +requires = ["uv_build>=0.8.0,<0.9.0"] build-backend = "uv_build" From 98d6ab6632a4ac06211eb2dbd18cd588c653b069 Mon Sep 17 00:00:00 2001 From: Jo <10510431+j178@users.noreply.github.com> Date: Mon, 21 Jul 2025 18:22:45 +0800 Subject: [PATCH 113/130] Improve `CPythonFinder._parse_download_url` a bit (#14780) ## Summary Rename `_parse_download_url` to `_parse_download_asset` and move the `asset['digest']` logic into it. 
## Test Plan ```console uv run ./crates/uv-python/fetch-download-metadata.py ``` --- crates/uv-python/fetch-download-metadata.py | 24 +++++++++------------ 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/crates/uv-python/fetch-download-metadata.py b/crates/uv-python/fetch-download-metadata.py index 3dd0817f3..f43349e50 100755 --- a/crates/uv-python/fetch-download-metadata.py +++ b/crates/uv-python/fetch-download-metadata.py @@ -53,8 +53,7 @@ import re from dataclasses import asdict, dataclass, field from enum import StrEnum from pathlib import Path -from typing import Generator, Iterable, NamedTuple, Self -from urllib.parse import unquote +from typing import Any, Generator, Iterable, NamedTuple, Self import httpx @@ -255,13 +254,7 @@ class CPythonFinder(Finder): # Sort the assets to ensure deterministic results row["assets"].sort(key=lambda asset: asset["browser_download_url"]) for asset in row["assets"]: - # On older versions, GitHub didn't backfill the digest. - if digest := asset["digest"]: - sha256 = digest.removeprefix("sha256:") - else: - sha256 = None - url = asset["browser_download_url"] - download = self._parse_download_url(url, sha256) + download = self._parse_download_asset(asset) if download is None: continue if ( @@ -355,16 +348,19 @@ class CPythonFinder(Finder): continue download.sha256 = checksums.get(download.filename) - def _parse_download_url( - self, url: str, sha256: str | None - ) -> PythonDownload | None: - """Parse an indygreg download URL into a PythonDownload object.""" + def _parse_download_asset(self, asset: dict[str, Any]) -> PythonDownload | None: + """Parse a python-build-standalone download asset into a PythonDownload object.""" + url = asset["browser_download_url"] # Ex) # https://github.com/astral-sh/python-build-standalone/releases/download/20240107/cpython-3.12.1%2B20240107-aarch64-unknown-linux-gnu-lto-full.tar.zst if url.endswith(".sha256"): return None - filename = unquote(url.rsplit("/", maxsplit=1)[-1]) release 
= int(url.rsplit("/")[-2]) + filename = asset["name"] + sha256 = None + # On older versions, GitHub didn't backfill the digest. + if digest := asset["digest"]: + sha256 = digest.removeprefix("sha256:") match = self._filename_re.match(filename) or self._legacy_filename_re.match( filename From 8ed86a6dcdc794f6054533d94e0983e527cb2d31 Mon Sep 17 00:00:00 2001 From: konsti Date: Mon, 21 Jul 2025 14:27:59 +0200 Subject: [PATCH 114/130] Remove Python 3.9.18 from `.python-versions` (#14784) Python 3.9.18 is not used in the tests anymore. --- .python-versions | 1 - 1 file changed, 1 deletion(-) diff --git a/.python-versions b/.python-versions index 957687cb4..f17a9a96b 100644 --- a/.python-versions +++ b/.python-versions @@ -6,7 +6,6 @@ 3.8.20 # The following are required for packse scenarios 3.9.20 -3.9.18 3.9.12 # The following is needed for `==3.13` request tests 3.13.0 From 9983273289ba229c20c3008b831a06c685497b29 Mon Sep 17 00:00:00 2001 From: Jo <10510431+j178@users.noreply.github.com> Date: Mon, 21 Jul 2025 20:35:45 +0800 Subject: [PATCH 115/130] Use sha256 checksum from GitHub API for GraalPy releases (#14779) ## Summary Follow #14078, use GitHub generated sha256 for GraalPy releases too. 
## Test Plan ```console uv run ./crates/uv-python/fetch-download-metadata.py ``` --- crates/uv-python/fetch-download-metadata.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/uv-python/fetch-download-metadata.py b/crates/uv-python/fetch-download-metadata.py index f43349e50..ec2b4835e 100755 --- a/crates/uv-python/fetch-download-metadata.py +++ b/crates/uv-python/fetch-download-metadata.py @@ -607,6 +607,9 @@ class GraalPyFinder(Finder): platform = self._normalize_os(m.group(1)) arch = self._normalize_arch(m.group(2)) libc = "gnu" if platform == "linux" else "none" + sha256 = None + if digest := asset["digest"]: + sha256 = digest.removeprefix("sha256:") download = PythonDownload( release=0, version=python_version, @@ -619,6 +622,7 @@ class GraalPyFinder(Finder): implementation=self.implementation, filename=asset["name"], url=url, + sha256=sha256, ) # Only keep the latest GraalPy version of each arch/platform if (python_version, arch, platform) not in results: @@ -633,6 +637,7 @@ class GraalPyFinder(Finder): return self.PLATFORM_MAPPING.get(os, os) async def _fetch_checksums(self, downloads: list[PythonDownload], n: int) -> None: + downloads = list(filter(lambda d: not d.sha256, downloads)) for idx, batch in enumerate(batched(downloads, n)): logging.info("Fetching GraalPy checksums: %d/%d", idx * n, len(downloads)) checksum_requests = [] From ab48dfd0cb56e10b50bdcf36aff5a09359388e97 Mon Sep 17 00:00:00 2001 From: konsti Date: Mon, 21 Jul 2025 14:38:33 +0200 Subject: [PATCH 116/130] Collect contains markers in enum (#14782) We'll add more contains markers for the wheel variants, so I want to unify them before rebasing the variants branch on them. 
--- crates/uv-pep508/src/marker/parse.rs | 34 ++++++++++------------------ crates/uv-pep508/src/marker/tree.rs | 31 ++++++++++++++++++------- 2 files changed, 35 insertions(+), 30 deletions(-) diff --git a/crates/uv-pep508/src/marker/parse.rs b/crates/uv-pep508/src/marker/parse.rs index 9c361c19d..de8bfac72 100644 --- a/crates/uv-pep508/src/marker/parse.rs +++ b/crates/uv-pep508/src/marker/parse.rs @@ -5,7 +5,7 @@ use uv_pep440::{Version, VersionPattern, VersionSpecifier}; use crate::cursor::Cursor; use crate::marker::MarkerValueExtra; -use crate::marker::tree::{ContainerOperator, MarkerValueDependencyGroup}; +use crate::marker::tree::{ContainerOperator, MarkerValueContains, MarkerValueDependencyGroup}; use crate::{ ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValue, MarkerValueString, MarkerValueVersion, MarkerWarningKind, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, @@ -209,10 +209,9 @@ pub(crate) fn parse_marker_key_op_value( MarkerValue::MarkerEnvString(key) => { let value = match r_value { MarkerValue::Extra - | MarkerValue::Extras - | MarkerValue::DependencyGroups | MarkerValue::MarkerEnvVersion(_) - | MarkerValue::MarkerEnvString(_) => { + | MarkerValue::MarkerEnvString(_) + | MarkerValue::MarkerEnvContains(_) => { reporter.report( MarkerWarningKind::MarkerMarkerComparison, "Comparing two markers with each other doesn't make any sense, @@ -245,9 +244,8 @@ pub(crate) fn parse_marker_key_op_value( let value = match r_value { MarkerValue::MarkerEnvVersion(_) | MarkerValue::MarkerEnvString(_) - | MarkerValue::Extra - | MarkerValue::Extras - | MarkerValue::DependencyGroups => { + | MarkerValue::MarkerEnvContains(_) + | MarkerValue::Extra => { reporter.report( MarkerWarningKind::ExtraInvalidComparison, "Comparing extra with something other than a quoted string is wrong, @@ -279,9 +277,11 @@ pub(crate) fn parse_marker_key_op_value( // `'...' == extra` MarkerValue::Extra => parse_extra_expr(operator, &l_string, reporter), // `'...' 
in extras` - MarkerValue::Extras => parse_extras_expr(operator, &l_string, reporter), + MarkerValue::MarkerEnvContains(MarkerValueContains::Extras) => { + parse_extras_expr(operator, &l_string, reporter) + } // `'...' in dependency_groups` - MarkerValue::DependencyGroups => { + MarkerValue::MarkerEnvContains(MarkerValueContains::DependencyGroups) => { parse_dependency_groups_expr(operator, &l_string, reporter) } // `'...' == '...'`, doesn't make much sense @@ -300,22 +300,12 @@ pub(crate) fn parse_marker_key_op_value( } } } - MarkerValue::Extras => { + MarkerValue::MarkerEnvContains(key) => { reporter.report( MarkerWarningKind::Pep440Error, format!( - "The `extras` marker must be used as '...' in extras' or '... not in extras', - found `{l_value} {operator} {r_value}`, will be ignored" - ), - ); - return Ok(None); - } - MarkerValue::DependencyGroups => { - reporter.report( - MarkerWarningKind::Pep440Error, - format!( - "The `dependency_groups` marker must be used as '...' in dependency_groups' or '... not in dependency_groups', - found `{l_value} {operator} {r_value}`, will be ignored" + "The `{key}` marker must be used as '...' in {key}' or '... not in {key}', + found `{key} {operator} {r_value}`, will be ignored" ), ); return Ok(None); diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 594b81723..95e7327ed 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -126,6 +126,24 @@ impl Display for MarkerValueString { } } +/// Those markers with exclusively `in` and `not in` operators (PEP 751) +#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub enum MarkerValueContains { + /// `extras`. This one is special because it's a list, and user-provided + Extras, + /// `dependency_groups`. 
This one is special because it's a list, and user-provided + DependencyGroups, +} + +impl Display for MarkerValueContains { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + Self::Extras => f.write_str("extras"), + Self::DependencyGroups => f.write_str("dependency_groups"), + } + } +} + /// One of the predefined environment values /// /// @@ -137,10 +155,8 @@ pub enum MarkerValue { MarkerEnvString(MarkerValueString), /// `extra`. This one is special because it's a list, and user-provided Extra, - /// `extras`. This one is special because it's a list, and user-provided - Extras, - /// `dependency_groups`. This one is special because it's a list, and user-provided - DependencyGroups, + /// Those markers with exclusively `in` and `not in` operators (PEP 751) + MarkerEnvContains(MarkerValueContains), /// Not a constant, but a user given quoted string with a value inside such as '3.8' or "windows" QuotedString(ArcStr), } @@ -181,8 +197,8 @@ impl FromStr for MarkerValue { "sys_platform" => Self::MarkerEnvString(MarkerValueString::SysPlatform), "sys.platform" => Self::MarkerEnvString(MarkerValueString::SysPlatformDeprecated), "extra" => Self::Extra, - "extras" => Self::Extras, - "dependency_groups" => Self::DependencyGroups, + "extras" => Self::MarkerEnvContains(MarkerValueContains::Extras), + "dependency_groups" => Self::MarkerEnvContains(MarkerValueContains::DependencyGroups), _ => return Err(format!("Invalid key: {s}")), }; Ok(value) @@ -195,8 +211,7 @@ impl Display for MarkerValue { Self::MarkerEnvVersion(marker_value_version) => marker_value_version.fmt(f), Self::MarkerEnvString(marker_value_string) => marker_value_string.fmt(f), Self::Extra => f.write_str("extra"), - Self::Extras => f.write_str("extras"), - Self::DependencyGroups => f.write_str("dependency_groups"), + Self::MarkerEnvContains(marker_value_contains) => marker_value_contains.fmt(f), Self::QuotedString(value) => write!(f, "'{value}'"), } } From 
b81cce9152047a8183d50319c74a7f66baa7b9a0 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 08:48:47 -0400 Subject: [PATCH 117/130] Support `extras` and `dependency_groups` markers on `uv pip install` and `uv pip sync` (#14755) ## Summary We don't yet support writing these, but we can at least read them (which, e.g., allows you to install PDM-exported `pylock.toml` files with uv, since PDM _always_ writes a default group). Closes #14740. --- crates/uv-cli/src/lib.rs | 61 +++-- .../uv-configuration/src/dependency_groups.rs | 12 + crates/uv-pep508/src/marker/tree.rs | 91 +++++++- crates/uv-requirements/src/sources.rs | 4 +- crates/uv-requirements/src/specification.rs | 64 ++++-- .../src/lock/export/pylock_toml.rs | 11 +- crates/uv/src/commands/pip/install.rs | 33 ++- crates/uv/src/commands/pip/operations.rs | 2 +- crates/uv/src/commands/pip/sync.rs | 45 +++- crates/uv/src/lib.rs | 6 + crates/uv/src/settings.rs | 7 + crates/uv/tests/it/pip_install.rs | 217 +++++++++++++++++- docs/reference/cli.md | 21 +- 13 files changed, 492 insertions(+), 82 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index d6560014f..e1084f035 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -1202,6 +1202,14 @@ pub struct PipCompileArgs { #[arg(long, overrides_with("all_extras"), hide = true)] pub no_all_extras: bool, + /// Install the specified dependency group from a `pyproject.toml`. + /// + /// If no path is provided, the `pyproject.toml` in the working directory is used. + /// + /// May be provided multiple times. + #[arg(long, group = "sources")] + pub group: Vec, + #[command(flatten)] pub resolver: ResolverArgs, @@ -1216,14 +1224,6 @@ pub struct PipCompileArgs { #[arg(long, overrides_with("no_deps"), hide = true)] pub deps: bool, - /// Install the specified dependency group from a `pyproject.toml`. - /// - /// If no path is provided, the `pyproject.toml` in the working directory is used. 
- /// - /// May be provided multiple times. - #[arg(long, group = "sources")] - pub group: Vec, - /// Write the compiled requirements to the given `requirements.txt` or `pylock.toml` file. /// /// If the file already exists, the existing versions will be preferred when resolving @@ -1518,6 +1518,30 @@ pub struct PipSyncArgs { #[arg(long, short, alias = "build-constraint", env = EnvVars::UV_BUILD_CONSTRAINT, value_delimiter = ' ', value_parser = parse_maybe_file_path)] pub build_constraints: Vec>, + /// Include optional dependencies from the specified extra name; may be provided more than once. + /// + /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources. + #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)] + pub extra: Option>, + + /// Include all optional dependencies. + /// + /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources. + #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")] + pub all_extras: bool, + + #[arg(long, overrides_with("all_extras"), hide = true)] + pub no_all_extras: bool, + + /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`. + /// + /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is + /// used. + /// + /// May be provided multiple times. + #[arg(long, group = "sources")] + pub group: Vec, + #[command(flatten)] pub installer: InstallerArgs, @@ -1798,19 +1822,28 @@ pub struct PipInstallArgs { /// Include optional dependencies from the specified extra name; may be provided more than once. /// - /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources. + /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources. #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)] pub extra: Option>, /// Include all optional dependencies. 
/// - /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources. + /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources. #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")] pub all_extras: bool, #[arg(long, overrides_with("all_extras"), hide = true)] pub no_all_extras: bool, + /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`. + /// + /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is + /// used. + /// + /// May be provided multiple times. + #[arg(long, group = "sources")] + pub group: Vec, + #[command(flatten)] pub installer: ResolverInstallerArgs, @@ -1825,14 +1858,6 @@ pub struct PipInstallArgs { #[arg(long, overrides_with("no_deps"), hide = true)] pub deps: bool, - /// Install the specified dependency group from a `pyproject.toml`. - /// - /// If no path is provided, the `pyproject.toml` in the working directory is used. - /// - /// May be provided multiple times. - #[arg(long, group = "sources")] - pub group: Vec, - /// Require a matching hash for each requirement. /// /// By default, uv will verify any available hashes in the requirements file, but will not diff --git a/crates/uv-configuration/src/dependency_groups.rs b/crates/uv-configuration/src/dependency_groups.rs index a3b90ea5f..70dd9db08 100644 --- a/crates/uv-configuration/src/dependency_groups.rs +++ b/crates/uv-configuration/src/dependency_groups.rs @@ -186,6 +186,18 @@ impl DependencyGroupsInner { self.include.names().chain(&self.exclude) } + /// Returns an iterator over all groups that are included in the specification, + /// assuming `all_names` is an iterator over all groups. 
+ pub fn group_names<'a, Names>( + &'a self, + all_names: Names, + ) -> impl Iterator + 'a + where + Names: Iterator + 'a, + { + all_names.filter(move |name| self.contains(name)) + } + /// Iterate over all groups the user explicitly asked for on the CLI pub fn explicit_names(&self) -> impl Iterator { let DependencyGroupsHistory { diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 95e7327ed..756c90ade 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -754,6 +754,51 @@ impl Display for MarkerExpression { } } +/// The extra and dependency group names to use when evaluating a marker tree. +#[derive(Debug, Copy, Clone)] +enum ExtrasEnvironment<'a> { + /// E.g., `extra == '...'` + Extras(&'a [ExtraName]), + /// E.g., `'...' in extras` or `'...' in dependency_groups` + Pep751(&'a [ExtraName], &'a [GroupName]), +} + +impl<'a> ExtrasEnvironment<'a> { + /// Creates a new [`ExtrasEnvironment`] for the given `extra` names. + fn from_extras(extras: &'a [ExtraName]) -> Self { + Self::Extras(extras) + } + + /// Creates a new [`ExtrasEnvironment`] for the given PEP 751 `extras` and `dependency_groups`. + fn from_pep751(extras: &'a [ExtraName], dependency_groups: &'a [GroupName]) -> Self { + Self::Pep751(extras, dependency_groups) + } + + /// Returns the `extra` names in this environment. + fn extra(&self) -> &[ExtraName] { + match self { + ExtrasEnvironment::Extras(extra) => extra, + ExtrasEnvironment::Pep751(..) => &[], + } + } + + /// Returns the `extras` names in this environment, as in a PEP 751 lockfile. + fn extras(&self) -> &[ExtraName] { + match self { + ExtrasEnvironment::Extras(..) => &[], + ExtrasEnvironment::Pep751(extras, ..) => extras, + } + } + + /// Returns the `dependency_group` group names in this environment, as in a PEP 751 lockfile. + fn dependency_groups(&self) -> &[GroupName] { + match self { + ExtrasEnvironment::Extras(..) 
=> &[], + ExtrasEnvironment::Pep751(.., groups) => groups, + } + } +} + /// Represents one or more nested marker expressions with and/or/parentheses. /// /// Marker trees are canonical, meaning any two functionally equivalent markers @@ -1001,7 +1046,27 @@ impl MarkerTree { /// Does this marker apply in the given environment? pub fn evaluate(self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool { - self.evaluate_reporter_impl(env, extras, &mut TracingReporter) + self.evaluate_reporter_impl( + env, + ExtrasEnvironment::from_extras(extras), + &mut TracingReporter, + ) + } + + /// Evaluate a marker in the context of a PEP 751 lockfile, which exposes several additional + /// markers (`extras` and `dependency_groups`) that are not available in any other context, + /// per the spec. + pub fn evaluate_pep751( + self, + env: &MarkerEnvironment, + extras: &[ExtraName], + groups: &[GroupName], + ) -> bool { + self.evaluate_reporter_impl( + env, + ExtrasEnvironment::from_pep751(extras, groups), + &mut TracingReporter, + ) } /// Evaluates this marker tree against an optional environment and a @@ -1018,7 +1083,11 @@ impl MarkerTree { ) -> bool { match env { None => self.evaluate_extras(extras), - Some(env) => self.evaluate_reporter_impl(env, extras, &mut TracingReporter), + Some(env) => self.evaluate_reporter_impl( + env, + ExtrasEnvironment::from_extras(extras), + &mut TracingReporter, + ), } } @@ -1030,13 +1099,13 @@ impl MarkerTree { extras: &[ExtraName], reporter: &mut impl Reporter, ) -> bool { - self.evaluate_reporter_impl(env, extras, reporter) + self.evaluate_reporter_impl(env, ExtrasEnvironment::from_extras(extras), reporter) } fn evaluate_reporter_impl( self, env: &MarkerEnvironment, - extras: &[ExtraName], + extras: ExtrasEnvironment, reporter: &mut impl Reporter, ) -> bool { match self.kind() { @@ -1088,12 +1157,18 @@ impl MarkerTree { } MarkerTreeKind::Extra(marker) => { return marker - .edge(extras.contains(marker.name().extra())) + 
.edge(extras.extra().contains(marker.name().extra())) .evaluate_reporter_impl(env, extras, reporter); } - // TODO(charlie): Add support for evaluating container extras in PEP 751 lockfiles. - MarkerTreeKind::Extras(..) | MarkerTreeKind::DependencyGroups(..) => { - return false; + MarkerTreeKind::Extras(marker) => { + return marker + .edge(extras.extras().contains(marker.name().extra())) + .evaluate_reporter_impl(env, extras, reporter); + } + MarkerTreeKind::DependencyGroups(marker) => { + return marker + .edge(extras.dependency_groups().contains(marker.name().group())) + .evaluate_reporter_impl(env, extras, reporter); } } diff --git a/crates/uv-requirements/src/sources.rs b/crates/uv-requirements/src/sources.rs index 090a72e5c..024ac5ebf 100644 --- a/crates/uv-requirements/src/sources.rs +++ b/crates/uv-requirements/src/sources.rs @@ -273,13 +273,13 @@ impl RequirementsSource { pub fn allows_extras(&self) -> bool { matches!( self, - Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_) + Self::PylockToml(_) | Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_) ) } /// Returns `true` if the source allows groups to be specified. pub fn allows_groups(&self) -> bool { - matches!(self, Self::PyprojectToml(_)) + matches!(self, Self::PylockToml(_) | Self::PyprojectToml(_)) } } diff --git a/crates/uv-requirements/src/specification.rs b/crates/uv-requirements/src/specification.rs index deead2c82..88a5eba21 100644 --- a/crates/uv-requirements/src/specification.rs +++ b/crates/uv-requirements/src/specification.rs @@ -250,10 +250,13 @@ impl RequirementsSpecification { // If we have a `pylock.toml`, don't allow additional requirements, constraints, or // overrides. 
- if requirements - .iter() - .any(|source| matches!(source, RequirementsSource::PylockToml(..))) - { + if let Some(pylock_toml) = requirements.iter().find_map(|source| { + if let RequirementsSource::PylockToml(path) = source { + Some(path) + } else { + None + } + }) { if requirements .iter() .any(|source| !matches!(source, RequirementsSource::PylockToml(..))) @@ -272,22 +275,38 @@ impl RequirementsSpecification { "Cannot specify constraints with a `pylock.toml` file" )); } - if groups.is_some_and(|groups| !groups.groups.is_empty()) { - return Err(anyhow::anyhow!( - "Cannot specify groups with a `pylock.toml` file" - )); + + // If we have a `pylock.toml`, disallow specifying paths for groups; instead, require + // that all groups refer to the `pylock.toml` file. + if let Some(groups) = groups { + let mut names = Vec::new(); + for group in &groups.groups { + if group.path.is_some() { + return Err(anyhow::anyhow!( + "Cannot specify paths for groups with a `pylock.toml` file; all groups must refer to the `pylock.toml` file" + )); + } + names.push(group.name.clone()); + } + + if !names.is_empty() { + spec.groups.insert( + pylock_toml.clone(), + DependencyGroups::from_args( + false, + false, + false, + Vec::new(), + Vec::new(), + false, + names, + false, + ), + ); + } } - } - - // Resolve sources into specifications so we know their `source_tree`. - let mut requirement_sources = Vec::new(); - for source in requirements { - let source = Self::from_source(source, client_builder).await?; - requirement_sources.push(source); - } - - // pip `--group` flags specify their own sources, which we need to process here - if let Some(groups) = groups { + } else if let Some(groups) = groups { + // pip `--group` flags specify their own sources, which we need to process here. // First, we collect all groups by their path. 
let mut groups_by_path = BTreeMap::new(); for group in &groups.groups { @@ -320,6 +339,13 @@ impl RequirementsSpecification { spec.groups = group_specs; } + // Resolve sources into specifications so we know their `source_tree`. + let mut requirement_sources = Vec::new(); + for source in requirements { + let source = Self::from_source(source, client_builder).await?; + requirement_sources.push(source); + } + // Read all requirements, and keep track of all requirements _and_ constraints. // A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading // a requirements file can also add constraints. diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index 80cd54be2..ef3ad8615 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -188,11 +188,11 @@ pub struct PylockToml { #[serde(skip_serializing_if = "Option::is_none")] requires_python: Option, #[serde(skip_serializing_if = "Vec::is_empty", default)] - extras: Vec, + pub extras: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] - dependency_groups: Vec, + pub dependency_groups: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] - default_groups: Vec, + pub default_groups: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub packages: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] @@ -966,9 +966,12 @@ impl<'lock> PylockToml { self, install_path: &Path, markers: &MarkerEnvironment, + extras: &[ExtraName], + groups: &[GroupName], tags: &Tags, build_options: &BuildOptions, ) -> Result { + // Convert the extras and dependency groups specifications to a concrete environment. let mut graph = petgraph::graph::DiGraph::with_capacity(self.packages.len(), self.packages.len()); @@ -977,7 +980,7 @@ impl<'lock> PylockToml { for package in self.packages { // Omit packages that aren't relevant to the current environment. 
- if !package.marker.evaluate(markers, &[]) { + if !package.marker.evaluate_pep751(markers, extras, groups) { continue; } diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index b9edad20e..72c532d7b 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -22,6 +22,7 @@ use uv_distribution_types::{ use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::{SatisfiesResult, SitePackages}; +use uv_normalize::{DefaultExtras, DefaultGroups}; use uv_pep508::PackageName; use uv_pypi_types::Conflicts; use uv_python::{ @@ -439,11 +440,35 @@ pub(crate) async fn pip_install( let install_path = std::path::absolute(&pylock)?; let install_path = install_path.parent().unwrap(); let content = fs_err::tokio::read_to_string(&pylock).await?; - let lock = toml::from_str::(&content) - .with_context(|| format!("Not a valid pylock.toml file: {}", pylock.user_display()))?; + let lock = toml::from_str::(&content).with_context(|| { + format!("Not a valid `pylock.toml` file: {}", pylock.user_display()) + })?; - let resolution = - lock.to_resolution(install_path, marker_env.markers(), &tags, &build_options)?; + // Convert the extras and groups specifications into a concrete form. 
+ let extras = extras.with_defaults(DefaultExtras::default()); + let extras = extras + .extra_names(lock.extras.iter()) + .cloned() + .collect::>(); + + let groups = groups + .get(&pylock) + .cloned() + .unwrap_or_default() + .with_defaults(DefaultGroups::List(lock.default_groups.clone())); + let groups = groups + .group_names(lock.dependency_groups.iter()) + .cloned() + .collect::>(); + + let resolution = lock.to_resolution( + install_path, + marker_env.markers(), + &extras, + &groups, + &tags, + &build_options, + )?; let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?; (resolution, hasher) diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 809f8bfdc..b5879ecf6 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -70,7 +70,7 @@ pub(crate) async fn read_requirements( "Use `package[extra]` syntax instead." }; return Err(anyhow!( - "Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. {hint}" + "Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. 
{hint}" ) .into()); } diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 47d180a74..2f46ef502 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -18,13 +18,14 @@ use uv_distribution_types::{DependencyMetadata, Index, IndexLocations, Origin, R use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::SitePackages; +use uv_normalize::{DefaultExtras, DefaultGroups}; use uv_pep508::PackageName; use uv_pypi_types::Conflicts; use uv_python::{ EnvironmentPreference, Prefix, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, PythonVersion, Target, }; -use uv_requirements::{RequirementsSource, RequirementsSpecification}; +use uv_requirements::{GroupsSpecification, RequirementsSource, RequirementsSpecification}; use uv_resolver::{ DependencyMode, ExcludeNewer, FlatIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, ResolutionMode, ResolverEnvironment, @@ -48,6 +49,8 @@ pub(crate) async fn pip_sync( requirements: &[RequirementsSource], constraints: &[RequirementsSource], build_constraints: &[RequirementsSource], + extras: &ExtrasSpecification, + groups: &GroupsSpecification, reinstall: Reinstall, link_mode: LinkMode, compile: bool, @@ -91,8 +94,6 @@ pub(crate) async fn pip_sync( // Initialize a few defaults. 
let overrides = &[]; - let extras = ExtrasSpecification::default(); - let groups = None; let upgrade = Upgrade::default(); let resolution_mode = ResolutionMode::default(); let prerelease_mode = PrereleaseMode::default(); @@ -118,8 +119,8 @@ pub(crate) async fn pip_sync( requirements, constraints, overrides, - &extras, - groups, + extras, + Some(groups), &client_builder, ) .await?; @@ -377,11 +378,35 @@ pub(crate) async fn pip_sync( let install_path = std::path::absolute(&pylock)?; let install_path = install_path.parent().unwrap(); let content = fs_err::tokio::read_to_string(&pylock).await?; - let lock = toml::from_str::(&content) - .with_context(|| format!("Not a valid pylock.toml file: {}", pylock.user_display()))?; + let lock = toml::from_str::(&content).with_context(|| { + format!("Not a valid `pylock.toml` file: {}", pylock.user_display()) + })?; - let resolution = - lock.to_resolution(install_path, marker_env.markers(), &tags, &build_options)?; + // Convert the extras and groups specifications into a concrete form. 
+ let extras = extras.with_defaults(DefaultExtras::default()); + let extras = extras + .extra_names(lock.extras.iter()) + .cloned() + .collect::>(); + + let groups = groups + .get(&pylock) + .cloned() + .unwrap_or_default() + .with_defaults(DefaultGroups::List(lock.default_groups.clone())); + let groups = groups + .group_names(lock.dependency_groups.iter()) + .cloned() + .collect::>(); + + let resolution = lock.to_resolution( + install_path, + marker_env.markers(), + &extras, + &groups, + &tags, + &build_options, + )?; let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?; (resolution, hasher) @@ -406,7 +431,7 @@ pub(crate) async fn pip_sync( source_trees, project, BTreeSet::default(), - &extras, + extras, &groups, preferences, site_packages.clone(), diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 6ca03a470..9a67bb877 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -566,11 +566,17 @@ async fn run(mut cli: Cli) -> Result { .into_iter() .map(RequirementsSource::from_constraints_txt) .collect::, _>>()?; + let groups = GroupsSpecification { + root: project_dir.to_path_buf(), + groups: args.settings.groups, + }; commands::pip_sync( &requirements, &constraints, &build_constraints, + &args.settings.extras, + &groups, args.settings.reinstall, args.settings.link_mode, args.settings.compile_bytecode, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index aa105cf97..534640f94 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -2058,6 +2058,10 @@ impl PipSyncSettings { src_file, constraints, build_constraints, + extra, + all_extras, + no_all_extras, + group, installer, refresh, require_hashes, @@ -2122,6 +2126,9 @@ impl PipSyncSettings { python_version, python_platform, strict: flag(strict, no_strict, "strict"), + extra, + all_extras: flag(all_extras, no_all_extras, "all-extras"), + group: Some(group), torch_backend, ..PipOptions::from(installer) }, diff --git 
a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index a977ac813..e1d48b86d 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -1298,27 +1298,27 @@ fn install_extras() -> Result<()> { uv_snapshot!(context.filters(), context.pip_install() .arg("--all-extras") .arg("-e") - .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" + .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `[extra]` syntax or `-r ` instead. - "### + error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `[extra]` syntax or `-r ` instead. + " ); // Request extras for a source tree uv_snapshot!(context.filters(), context.pip_install() .arg("--all-extras") - .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" + .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead. - "### + error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead. + " ); let requirements_txt = context.temp_dir.child("requirements.txt"); @@ -1327,14 +1327,14 @@ fn install_extras() -> Result<()> { // Request extras for a requirements file uv_snapshot!(context.filters(), context.pip_install() .arg("--all-extras") - .arg("-r").arg("requirements.txt"), @r###" + .arg("-r").arg("requirements.txt"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead. 
- "### + error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead. + " ); let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -11392,6 +11392,205 @@ fn pep_751_multiple_sources() -> Result<()> { Ok(()) } +#[test] +fn pep_751_groups() -> Result<()> { + let context = TestContext::new("3.13"); + + let pylock_toml = context.temp_dir.child("pylock.toml"); + pylock_toml.write_str( + r#" +lock-version = "1.0" +requires-python = "==3.13.*" +environments = [ + "python_version == \"3.13\"", +] +extras = ["async", "dev"] +dependency-groups = ["default", "test"] +default-groups = ["default"] +created-by = "pdm" +[[packages]] +name = "anyio" +version = "4.9.0" +requires-python = ">=3.9" +sdist = {name = "anyio-4.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hashes = {sha256 = "673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}} +wheels = [ + {name = "anyio-4.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl",hashes = {sha256 = "9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}}, +] +marker = "\"async\" in extras" + +[packages.tool.pdm] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.5; python_version < \"3.13\"", +] + +[[packages]] +name = "blinker" +version = "1.9.0" +requires-python = ">=3.9" +sdist = {name = "blinker-1.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hashes = {sha256 = "b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}} +wheels = [ + {name = "blinker-1.9.0-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl",hashes = {sha256 = "ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}}, +] +marker = "\"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "idna" +version = "3.10" +requires-python = ">=3.6" +sdist = {name = "idna-3.10.tar.gz", url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hashes = {sha256 = "12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}} +wheels = [ + {name = "idna-3.10-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl",hashes = {sha256 = "946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}}, +] +marker = "\"async\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "iniconfig" +version = "2.1.0" +requires-python = ">=3.8" +sdist = {name = "iniconfig-2.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hashes = {sha256 = "3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}} +wheels = [ + {name = "iniconfig-2.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl",hashes = {sha256 = "9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "pygments" +version = "2.19.2" +requires-python = ">=3.8" +sdist = {name = "pygments-2.19.2.tar.gz", url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", 
hashes = {sha256 = "636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}} +wheels = [ + {name = "pygments-2.19.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl",hashes = {sha256 = "86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}}, +] +marker = "\"test\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "sniffio" +version = "1.3.1" +requires-python = ">=3.7" +sdist = {name = "sniffio-1.3.1.tar.gz", url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hashes = {sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}} +wheels = [ + {name = "sniffio-1.3.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl",hashes = {sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}}, +] +marker = "\"async\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[tool.pdm] +hashes = {sha256 = "51795362d337720c28bd6c3a26eb33751f2b69590261f599ffb4172ee2c441c6"} + +[[tool.pdm.targets]] +requires_python = "==3.13.*" + "#, + )?; + + // By default, only `iniconfig` should be installed, since it's in the default group. + uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.1.0 + " + ); + + // With `--extra async`, `anyio` should be installed. 
+ uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml") + .arg("--extra") + .arg("async"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.9.0 + + idna==3.10 + + sniffio==1.3.1 + " + ); + + // With `--group test`, `pygments` should be installed. + uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml") + .arg("--group") + .arg("test"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + pygments==2.19.2 + " + ); + + // With `--all-extras`, `blinker` should be installed. + uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml") + .arg("--all-extras"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + blinker==1.9.0 + " + ); + + // `--group pylock.toml:test` should be rejected. + uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml") + .arg("--group") + .arg("pylock.toml:test"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: invalid value 'pylock.toml:test' for '--group ': The `--group` path is required to end in 'pyproject.toml' for compatibility with pip; got: pylock.toml + + For more information, try '--help'. + " + ); + + Ok(()) +} + /// Test that uv doesn't hang if an index returns a distribution for the wrong package. #[tokio::test] async fn bogus_redirect() -> Result<()> { diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 2ca95dce0..409ef5911 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -3637,7 +3637,9 @@ uv pip sync [OPTIONS] ...

    Options

    -
    --allow-empty-requirements

    Allow sync of empty requirements, which will clear the environment of all packages

    +
    --all-extras

    Include all optional dependencies.

    +

    Only applies to pylock.toml, pyproject.toml, setup.py, and setup.cfg sources.

    +
    --allow-empty-requirements

    Allow sync of empty requirements, which will clear the environment of all packages

    --allow-insecure-host, --trusted-host allow-insecure-host

    Allow insecure connections to a host.

    Can be provided multiple times.

    Expects to receive either a hostname (e.g., localhost), a host-port pair (e.g., localhost:8080), or a URL (e.g., https://localhost).

    @@ -3675,13 +3677,18 @@ uv pip sync [OPTIONS] ...
    --dry-run

    Perform a dry run, i.e., don't actually install anything but resolve the dependencies and print the resulting plan

    --exclude-newer exclude-newer

    Limit candidate packages to those that were uploaded prior to the given date.

    Accepts both RFC 3339 timestamps (e.g., 2006-12-02T02:07:43Z) and local dates in the same format (e.g., 2006-12-02) in your system's configured time zone.

    -

    May also be set with the UV_EXCLUDE_NEWER environment variable.

    --extra-index-url extra-index-url

    (Deprecated: use --index instead) Extra URLs of package indexes to use, in addition to --index-url.

    +

    May also be set with the UV_EXCLUDE_NEWER environment variable.

    --extra extra

    Include optional dependencies from the specified extra name; may be provided more than once.

    +

    Only applies to pylock.toml, pyproject.toml, setup.py, and setup.cfg sources.

    +
    --extra-index-url extra-index-url

    (Deprecated: use --index instead) Extra URLs of package indexes to use, in addition to --index-url.

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    All indexes provided via this flag take priority over the index specified by --index-url (which defaults to PyPI). When multiple --extra-index-url flags are provided, earlier values take priority.

    May also be set with the UV_EXTRA_INDEX_URL environment variable.

    Locations to search for candidate distributions, in addition to those found in the registry indexes.

    If a path, the target must be a directory that contains packages as wheel files (.whl) or source distributions (e.g., .tar.gz or .zip) at the top level.

    If a URL, the page must contain a flat list of links to package files adhering to the formats described above.

    -

    May also be set with the UV_FIND_LINKS environment variable.

    --help, -h

    Display the concise help for this command

    +

    May also be set with the UV_FIND_LINKS environment variable.

    --group group

    Install the specified dependency group from a pylock.toml or pyproject.toml.

    +

    If no path is provided, the pylock.toml or pyproject.toml in the working directory is used.

    +

    May be provided multiple times.

    +
    --help, -h

    Display the concise help for this command

    --index index

    The URLs to use when resolving dependencies, in addition to the default index.

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

    @@ -3888,7 +3895,7 @@ uv pip install [OPTIONS] |--editable Options
    --all-extras

    Include all optional dependencies.

    -

    Only applies to pyproject.toml, setup.py, and setup.cfg sources.

    +

    Only applies to pylock.toml, pyproject.toml, setup.py, and setup.cfg sources.

    --allow-insecure-host, --trusted-host allow-insecure-host

    Allow insecure connections to a host.

    Can be provided multiple times.

    Expects to receive either a hostname (e.g., localhost), a host-port pair (e.g., localhost:8080), or a URL (e.g., https://localhost).

    @@ -3930,7 +3937,7 @@ uv pip install [OPTIONS] |--editable
    --exclude-newer exclude-newer

    Limit candidate packages to those that were uploaded prior to the given date.

    Accepts both RFC 3339 timestamps (e.g., 2006-12-02T02:07:43Z) and local dates in the same format (e.g., 2006-12-02) in your system's configured time zone.

    May also be set with the UV_EXCLUDE_NEWER environment variable.

    --extra extra

    Include optional dependencies from the specified extra name; may be provided more than once.

    -

    Only applies to pyproject.toml, setup.py, and setup.cfg sources.

    +

    Only applies to pylock.toml, pyproject.toml, setup.py, and setup.cfg sources.

    --extra-index-url extra-index-url

    (Deprecated: use --index instead) Extra URLs of package indexes to use, in addition to --index-url.

    Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

    All indexes provided via this flag take priority over the index specified by --index-url (which defaults to PyPI). When multiple --extra-index-url flags are provided, earlier values take priority.

    @@ -3944,8 +3951,8 @@ uv pip install [OPTIONS] |--editable
  • fewest: Optimize for selecting the fewest number of versions for each package. Older versions may be preferred if they are compatible with a wider range of supported Python versions or platforms
  • requires-python: Optimize for selecting latest supported version of each package, for each supported Python version
  • -
    --group group

    Install the specified dependency group from a pyproject.toml.

    -

    If no path is provided, the pyproject.toml in the working directory is used.

    +
    --group group

    Install the specified dependency group from a pylock.toml or pyproject.toml.

    +

    If no path is provided, the pylock.toml or pyproject.toml in the working directory is used.

    May be provided multiple times.

    --help, -h

    Display the concise help for this command

    --index index

    The URLs to use when resolving dependencies, in addition to the default index.

    From ba1319450a145cbe92751243e93995ff0f4a0bb8 Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Mon, 21 Jul 2025 09:18:16 -0400 Subject: [PATCH 118/130] Update `toml` to v0.9 (#14571) ## Summary This should give us some performance and error message improvements. --------- Co-authored-by: Charlie Marsh Co-authored-by: Zanie Blue --- Cargo.lock | 44 ++++++++++++------- Cargo.toml | 4 +- crates/uv-build-frontend/src/lib.rs | 14 +++--- .../src/metadata/requires_dist.rs | 11 +++-- .../src/metadata/pyproject_toml.rs | 4 +- crates/uv-workspace/src/pyproject.rs | 7 +-- crates/uv/tests/it/edit.rs | 14 +++--- crates/uv/tests/it/lock.rs | 13 +++--- crates/uv/tests/it/pip_install.rs | 9 ++-- crates/uv/tests/it/show_settings.rs | 7 ++- crates/uv/tests/it/venv.rs | 8 ++-- 11 files changed, 72 insertions(+), 63 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b73f29d7..e8f91b076 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1252,9 +1252,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "fontconfig-parser" @@ -3596,9 +3596,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.9" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" dependencies = [ "serde", ] @@ -4218,44 +4218,58 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.23" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +checksum = 
"ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac" dependencies = [ + "foldhash", + "indexmap", "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_parser", + "toml_writer", + "winnow", ] [[package]] name = "toml_datetime" -version = "0.6.11" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.27" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +checksum = "d1dee9dc43ac2aaf7d3b774e2fba5148212bf2bd9374f4e50152ebe9afd03d42" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", - "toml_write", + "toml_parser", + "toml_writer", "winnow", ] [[package]] -name = "toml_write" -version = "0.1.2" +name = "toml_parser" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_writer" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" [[package]] name = "tower" diff --git a/Cargo.toml b/Cargo.toml index 2c32ce8d0..7c858a81d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -172,8 +172,8 @@ tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101 tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] } tokio-stream = { version = "0.1.16" } tokio-util = { version = "0.7.12", features = ["compat", "io"] } -toml = { version = "0.8.19" } 
-toml_edit = { version = "0.22.21", features = ["serde"] } +toml = { version = "0.9.2", features = ["fast_hash"] } +toml_edit = { version = "0.23.2", features = ["serde"] } tracing = { version = "0.1.40" } tracing-durations-export = { version = "0.3.0", features = ["plot"] } tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry"] } diff --git a/crates/uv-build-frontend/src/lib.rs b/crates/uv-build-frontend/src/lib.rs index 67bee9619..e2a128747 100644 --- a/crates/uv-build-frontend/src/lib.rs +++ b/crates/uv-build-frontend/src/lib.rs @@ -19,8 +19,8 @@ use fs_err as fs; use indoc::formatdoc; use itertools::Itertools; use rustc_hash::FxHashMap; -use serde::de::{IntoDeserializer, SeqAccess, Visitor, value}; -use serde::{Deserialize, Deserializer, de}; +use serde::de::{self, IntoDeserializer, SeqAccess, Visitor, value}; +use serde::{Deserialize, Deserializer}; use tempfile::TempDir; use tokio::io::AsyncBufReadExt; use tokio::process::Command; @@ -511,12 +511,10 @@ impl SourceBuild { ) -> Result<(Pep517Backend, Option), Box> { match fs::read_to_string(source_tree.join("pyproject.toml")) { Ok(toml) => { - let pyproject_toml: toml_edit::ImDocument<_> = - toml_edit::ImDocument::from_str(&toml) - .map_err(Error::InvalidPyprojectTomlSyntax)?; - let pyproject_toml: PyProjectToml = - PyProjectToml::deserialize(pyproject_toml.into_deserializer()) - .map_err(Error::InvalidPyprojectTomlSchema)?; + let pyproject_toml = toml_edit::Document::from_str(&toml) + .map_err(Error::InvalidPyprojectTomlSyntax)?; + let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer()) + .map_err(Error::InvalidPyprojectTomlSchema)?; let backend = if let Some(build_system) = pyproject_toml.build_system { // If necessary, lower the requirements. 
diff --git a/crates/uv-distribution/src/metadata/requires_dist.rs b/crates/uv-distribution/src/metadata/requires_dist.rs index e9f36f174..a5645c126 100644 --- a/crates/uv-distribution/src/metadata/requires_dist.rs +++ b/crates/uv-distribution/src/metadata/requires_dist.rs @@ -618,14 +618,13 @@ mod test { tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" } "#}; - assert_snapshot!(format_err(input).await, @r###" - error: TOML parse error at line 8, column 16 + assert_snapshot!(format_err(input).await, @r#" + error: TOML parse error at line 8, column 28 | 8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" } - | ^ - invalid string - expected `"`, `'` - "###); + | ^ + missing comma between key-value pairs, expected `,` + "#); } #[tokio::test] diff --git a/crates/uv-pypi-types/src/metadata/pyproject_toml.rs b/crates/uv-pypi-types/src/metadata/pyproject_toml.rs index 113021a34..8487f058a 100644 --- a/crates/uv-pypi-types/src/metadata/pyproject_toml.rs +++ b/crates/uv-pypi-types/src/metadata/pyproject_toml.rs @@ -19,9 +19,9 @@ pub struct PyProjectToml { impl PyProjectToml { pub fn from_toml(toml: &str) -> Result { - let pyproject_toml: toml_edit::ImDocument<_> = toml_edit::ImDocument::from_str(toml) + let pyproject_toml = toml_edit::Document::from_str(toml) .map_err(MetadataError::InvalidPyprojectTomlSyntax)?; - let pyproject_toml: Self = PyProjectToml::deserialize(pyproject_toml.into_deserializer()) + let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer()) .map_err(MetadataError::InvalidPyprojectTomlSchema)?; Ok(pyproject_toml) } diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index 4a994b801..b02dadc5d 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -17,7 +17,8 @@ use std::str::FromStr; use glob::Pattern; use owo_colors::OwoColorize; use rustc_hash::{FxBuildHasher, FxHashSet}; -use serde::{Deserialize, Deserializer, Serialize, 
de::IntoDeserializer, de::SeqAccess}; +use serde::de::{IntoDeserializer, SeqAccess}; +use serde::{Deserialize, Deserializer, Serialize}; use thiserror::Error; use uv_build_backend::BuildBackendSettings; use uv_distribution_types::{Index, IndexName, RequirementSource}; @@ -72,8 +73,8 @@ pub struct PyProjectToml { impl PyProjectToml { /// Parse a `PyProjectToml` from a raw TOML string. pub fn from_string(raw: String) -> Result { - let pyproject: toml_edit::ImDocument<_> = - toml_edit::ImDocument::from_str(&raw).map_err(PyprojectTomlError::TomlSyntax)?; + let pyproject = + toml_edit::Document::from_str(&raw).map_err(PyprojectTomlError::TomlSyntax)?; let pyproject = PyProjectToml::deserialize(pyproject.into_deserializer()) .map_err(PyprojectTomlError::TomlSchema)?; Ok(PyProjectToml { raw, ..pyproject }) diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index aa494435c..a7d11091b 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -3568,7 +3568,7 @@ fn add_update_git_reference_script() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - script_content, @r###" + script_content, @r##" # /// script # requires-python = ">=3.11" # dependencies = [ @@ -3581,7 +3581,7 @@ fn add_update_git_reference_script() -> Result<()> { import time time.sleep(5) - "### + "## ); }); @@ -3601,7 +3601,7 @@ fn add_update_git_reference_script() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - script_content, @r###" + script_content, @r##" # /// script # requires-python = ">=3.11" # dependencies = [ @@ -3614,7 +3614,7 @@ fn add_update_git_reference_script() -> Result<()> { import time time.sleep(5) - "### + "## ); }); @@ -10896,7 +10896,7 @@ fn add_preserves_empty_comment() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "project" version = "0.1.0" @@ -10906,7 +10906,7 @@ fn add_preserves_empty_comment() -> Result<()> { # Second 
line. "anyio==3.7.0", ] - "### + "# ); }); @@ -13189,7 +13189,7 @@ fn add_path_with_existing_workspace() -> Result<()> { [tool.uv.workspace] members = [ "project", - "dep", + "dep", ] "# ); diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index ff9b711b7..0962ff6d2 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -18868,7 +18868,7 @@ fn lock_duplicate_sources() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r#" success: false exit_code: 2 ----- stdout ----- @@ -18878,17 +18878,16 @@ fn lock_duplicate_sources() -> Result<()> { TOML parse error at line 9, column 9 | 9 | python-multipart = { url = "https://files.pythonhosted.org/packages/c0/3e/9fbfd74e7f5b54f653f7ca99d44ceb56e718846920162165061c4c22b71a/python_multipart-0.0.8-py3-none-any.whl" } - | ^ - duplicate key `python-multipart` in table `tool.uv.sources` + | ^^^^^^^^^^^^^^^^ + duplicate key error: Failed to parse: `pyproject.toml` Caused by: TOML parse error at line 9, column 9 | 9 | python-multipart = { url = "https://files.pythonhosted.org/packages/c0/3e/9fbfd74e7f5b54f653f7ca99d44ceb56e718846920162165061c4c22b71a/python_multipart-0.0.8-py3-none-any.whl" } - | ^ - duplicate key `python-multipart` in table `tool.uv.sources` - - "###); + | ^^^^^^^^^^^^^^^^ + duplicate key + "#); let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str( diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index e1d48b86d..de62c4222 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -118,7 +118,7 @@ fn invalid_pyproject_toml_syntax() -> Result<()> { uv_snapshot!(context.pip_install() .arg("-r") - .arg("pyproject.toml"), @r###" + .arg("pyproject.toml"), @r" success: false exit_code: 2 ----- stdout ----- @@ -129,16 +129,15 @@ fn invalid_pyproject_toml_syntax() -> Result<()> { | 1 | 123 - 456 | ^ - 
expected `.`, `=` + key with no value, expected `=` error: Failed to parse: `pyproject.toml` Caused by: TOML parse error at line 1, column 5 | 1 | 123 - 456 | ^ - expected `.`, `=` - - "### + key with no value, expected `=` + " ); Ok(()) diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index 293b437d6..bbcddd2b1 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -4323,7 +4323,7 @@ fn resolve_config_file() -> anyhow::Result<()> { .arg("--show-settings") .arg("--config-file") .arg(config.path()) - .arg("requirements.in"), @r###" + .arg("requirements.in"), @r#" success: false exit_code: 2 ----- stdout ----- @@ -4335,9 +4335,8 @@ fn resolve_config_file() -> anyhow::Result<()> { | 9 | "" | ^ - expected `.`, `=` - - "### + key with no value, expected `=` + "# ); Ok(()) diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs index 120d7def2..726d1731b 100644 --- a/crates/uv/tests/it/venv.rs +++ b/crates/uv/tests/it/venv.rs @@ -718,7 +718,7 @@ fn create_venv_warns_user_on_requires_python_discovery_error() -> Result<()> { let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str(indoc! 
{ r"invalid toml" })?; - uv_snapshot!(context.filters(), context.venv(), @r###" + uv_snapshot!(context.filters(), context.venv(), @r" success: true exit_code: 0 ----- stdout ----- @@ -729,19 +729,19 @@ fn create_venv_warns_user_on_requires_python_discovery_error() -> Result<()> { | 1 | invalid toml | ^ - expected `.`, `=` + key with no value, expected `=` warning: Failed to parse `pyproject.toml` during environment creation: TOML parse error at line 1, column 9 | 1 | invalid toml | ^ - expected `.`, `=` + key with no value, expected `=` Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] Creating virtual environment at: .venv Activate with: source .venv/[BIN]/activate - "### + " ); context.venv.assert(predicates::path::is_dir()); From d768dedff674de659ae8c7fc3d03faf41a13874d Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Mon, 21 Jul 2025 14:17:06 +0000 Subject: [PATCH 119/130] Remove `version_get_fallback_unmanaged_json` test (#14786) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The `version_get_fallback_unmanaged_json` test was failing when running tests outside of a git checkout (e.g., from a release tarball) due to inconsistent behavior based on git availability. The test had conditional logic that expected different outcomes depending on whether `git_version_info_expected()` returned true or false: - In git checkouts: Expected failure with "The project is marked as unmanaged" error - Outside git checkouts: Expected success with fallback behavior showing version info However, the fallback behavior was removed in version 0.8.0, making this test obsolete. All other similar tests (`version_get_fallback_unmanaged`, `version_get_fallback_unmanaged_short`, `version_get_fallback_unmanaged_strict`) consistently expect failure when a project is marked as unmanaged, regardless of git availability. This change removes the problematic test entirely, as suggested by @zanieb. 
All remaining version tests (51 total) continue to pass. Fixes #14785. --- 💡 You can make Copilot smarter by setting up custom instructions, customizing its development environment and configuring Model Context Protocol (MCP) servers. Learn more [Copilot coding agent tips](https://gh.io/copilot-coding-agent-tips) in the docs. --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: zanieb <2586601+zanieb@users.noreply.github.com> --- crates/uv/tests/it/version.rs | 80 ----------------------------------- 1 file changed, 80 deletions(-) diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs index 78dd64252..e5f6e1687 100644 --- a/crates/uv/tests/it/version.rs +++ b/crates/uv/tests/it/version.rs @@ -1545,86 +1545,6 @@ fn git_version_info_expected() -> bool { git_dir.exists() } -// version_get_fallback with `--json` -#[test] -fn version_get_fallback_unmanaged_json() -> Result<()> { - let context = TestContext::new("3.12"); - - let pyproject_toml = context.temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#" - [project] - name = "myapp" - version = "0.1.2" - - [tool.uv] - managed = false - "#, - )?; - - let filters = context - .filters() - .into_iter() - .chain([ - ( - r#"version": "\d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?""#, - r#"version": "[VERSION]""#, - ), - ( - r#"short_commit_hash": ".*""#, - r#"short_commit_hash": "[HASH]""#, - ), - (r#"commit_hash": ".*""#, r#"commit_hash": "[LONGHASH]""#), - (r#"commit_date": ".*""#, r#"commit_date": "[DATE]""#), - (r#"last_tag": (".*"|null)"#, r#"last_tag": "[TAG]""#), - ( - r#"commits_since_last_tag": .*"#, - r#"commits_since_last_tag": [COUNT]"#, - ), - ]) - .collect::>(); - if git_version_info_expected() { - uv_snapshot!(filters, context.version() - .arg("--output-format").arg("json"), @r" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: The project is marked as unmanaged: `[TEMP_DIR]/` - "); - } 
else { - uv_snapshot!(filters, context.version() - .arg("--output-format").arg("json"), @r#" - success: true - exit_code: 0 - ----- stdout ----- - { - "package_name": "uv", - "version": "[VERSION]", - "commit_info": null - } - - ----- stderr ----- - warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error. - "#); - } - - let pyproject = fs_err::read_to_string(&pyproject_toml)?; - assert_snapshot!( - pyproject, - @r#" - [project] - name = "myapp" - version = "0.1.2" - - [tool.uv] - managed = false - "# - ); - Ok(()) -} - // Should error if this pyproject.toml isn't usable for whatever reason // and --project was passed explicitly. #[test] From aafeda2253369803bc315f0d894e7b58f83c007d Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 10:37:14 -0400 Subject: [PATCH 120/130] Enforce `requires-python` in `pylock.toml` (#14787) ## Summary Turns out we weren't validating this at install-time. 
--- .../src/lock/export/pylock_toml.rs | 2 +- crates/uv/src/commands/pip/install.rs | 11 +++++ crates/uv/src/commands/pip/sync.rs | 11 +++++ crates/uv/tests/it/pip_install.rs | 45 +++++++++++++++++++ 4 files changed, 68 insertions(+), 1 deletion(-) diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index ef3ad8615..642b9488a 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -186,7 +186,7 @@ pub struct PylockToml { lock_version: Version, created_by: String, #[serde(skip_serializing_if = "Option::is_none")] - requires_python: Option, + pub requires_python: Option, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub extras: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 72c532d7b..cb1229d72 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -444,6 +444,17 @@ pub(crate) async fn pip_install( format!("Not a valid `pylock.toml` file: {}", pylock.user_display()) })?; + // Verify that the Python version is compatible with the lock file. + if let Some(requires_python) = lock.requires_python.as_ref() { + if !requires_python.contains(interpreter.python_version()) { + return Err(anyhow::anyhow!( + "The requested interpreter resolved to Python {}, which is incompatible with the `pylock.toml`'s Python requirement: `{}`", + interpreter.python_version(), + requires_python, + )); + } + } + // Convert the extras and groups specifications into a concrete form. 
let extras = extras.with_defaults(DefaultExtras::default()); let extras = extras diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 2f46ef502..2fe5fbe87 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -382,6 +382,17 @@ pub(crate) async fn pip_sync( format!("Not a valid `pylock.toml` file: {}", pylock.user_display()) })?; + // Verify that the Python version is compatible with the lock file. + if let Some(requires_python) = lock.requires_python.as_ref() { + if !requires_python.contains(interpreter.python_version()) { + return Err(anyhow::anyhow!( + "The requested interpreter resolved to Python {}, which is incompatible with the `pylock.toml`'s Python requirement: `{}`", + interpreter.python_version(), + requires_python, + )); + } + } + // Convert the extras and groups specifications into a concrete form. let extras = extras.with_defaults(DefaultExtras::default()); let extras = extras diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index de62c4222..936f77aff 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -11590,6 +11590,51 @@ requires_python = "==3.13.*" Ok(()) } +#[test] +fn pep_751_requires_python() -> Result<()> { + let context = TestContext::new_with_versions(&["3.12", "3.13"]); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.13" + dependencies = ["iniconfig"] + "#, + )?; + + context + .export() + .arg("-o") + .arg("pylock.toml") + .assert() + .success(); + + context + .venv() + .arg("--python") + .arg("3.12") + .assert() + .success(); + + uv_snapshot!(context.filters(), context.pip_install() + .arg("--preview") + .arg("-r") + .arg("pylock.toml"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: The requested interpreter resolved to Python 
3.12.[X], which is incompatible with the `pylock.toml`'s Python requirement: `>=3.13` + " + ); + + Ok(()) +} + /// Test that uv doesn't hang if an index returns a distribution for the wrong package. #[tokio::test] async fn bogus_redirect() -> Result<()> { From 80708dea6e09c998204c61bb09230ac7beda8210 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 10:48:52 -0400 Subject: [PATCH 121/130] Use a match for Windows executables in `venv` (#14766) ## Summary I found it confusing that the `else` case for `== "graalpy"` is still necessary for the `== "pypy"` branch (i.e., that `pythonw.exe` is copied for PyPy despite not being in the `== "pypy"` branch). Instead, we now use a match for PyP, GraalPy, and then everything else. --- crates/uv-virtualenv/src/virtualenv.rs | 163 +++++++++++++------------ 1 file changed, 88 insertions(+), 75 deletions(-) diff --git a/crates/uv-virtualenv/src/virtualenv.rs b/crates/uv-virtualenv/src/virtualenv.rs index fe464c04a..822e8d7bb 100644 --- a/crates/uv-virtualenv/src/virtualenv.rs +++ b/crates/uv-virtualenv/src/virtualenv.rs @@ -268,6 +268,7 @@ pub(crate) fn create( create_link_to_executable(targetw.as_path(), executable_target) .map_err(Error::Python)?; } else { + // Always copy `python.exe`. 
copy_launcher_windows( WindowsExecutable::Python, interpreter, @@ -276,81 +277,93 @@ pub(crate) fn create( python_home, )?; - if interpreter.markers().implementation_name() == "graalpy" { - copy_launcher_windows( - WindowsExecutable::GraalPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - } else { - copy_launcher_windows( - WindowsExecutable::Pythonw, - interpreter, - &base_python, - &scripts, - python_home, - )?; - } - - if interpreter.markers().implementation_name() == "pypy" { - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyw, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinorw, - interpreter, - &base_python, - &scripts, - python_home, - )?; + match interpreter.implementation_name() { + "graalpy" => { + // For GraalPy, copy `graalpy.exe` and `python3.exe`. + copy_launcher_windows( + WindowsExecutable::GraalPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } + "pypy" => { + // For PyPy, copy all versioned executables and all PyPy-specific executables. 
+ copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::Pythonw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinorw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } + _ => { + // For all other interpreters, copy `pythonw.exe`. + copy_launcher_windows( + WindowsExecutable::Pythonw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } } } } From d052427c374c7fe42e65b4e56fd61a6febc79453 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 11:53:28 -0400 Subject: [PATCH 122/130] Accept `&Path` when creating executable links (#14791) ## Summary I don't see a great reason for this to take an owned value. It only needs an owned value for error cases. --- crates/uv-python/src/managed.rs | 12 ++++++------ crates/uv-virtualenv/src/virtualenv.rs | 4 ++-- crates/uv/src/commands/python/install.rs | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/uv-python/src/managed.rs b/crates/uv-python/src/managed.rs index ad1dacac6..9ee72adda 100644 --- a/crates/uv-python/src/managed.rs +++ b/crates/uv-python/src/managed.rs @@ -847,7 +847,7 @@ fn executable_path_from_base( /// Create a link to a managed Python executable. 
/// /// If the file already exists at the link path, an error will be returned. -pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), Error> { +pub fn create_link_to_executable(link: &Path, executable: &Path) -> Result<(), Error> { let link_parent = link.parent().ok_or(Error::NoExecutableDirectory)?; fs_err::create_dir_all(link_parent).map_err(|err| Error::ExecutableDirectory { to: link_parent.to_path_buf(), @@ -856,20 +856,20 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), if cfg!(unix) { // Note this will never copy on Unix — we use it here to allow compilation on Windows - match symlink_or_copy_file(&executable, link) { + match symlink_or_copy_file(executable, link) { Ok(()) => Ok(()), Err(err) if err.kind() == io::ErrorKind::NotFound => { - Err(Error::MissingExecutable(executable.clone())) + Err(Error::MissingExecutable(executable.to_path_buf())) } Err(err) => Err(Error::LinkExecutable { - from: executable, + from: executable.to_path_buf(), to: link.to_path_buf(), err, }), } } else if cfg!(windows) { // TODO(zanieb): Install GUI launchers as well - let launcher = windows_python_launcher(&executable, false)?; + let launcher = windows_python_launcher(executable, false)?; // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach // error context anyway @@ -878,7 +878,7 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), std::fs::File::create_new(link) .and_then(|mut file| file.write_all(launcher.as_ref())) .map_err(|err| Error::LinkExecutable { - from: executable, + from: executable.to_path_buf(), to: link.to_path_buf(), err, }) diff --git a/crates/uv-virtualenv/src/virtualenv.rs b/crates/uv-virtualenv/src/virtualenv.rs index 822e8d7bb..5d3ab4a88 100644 --- a/crates/uv-virtualenv/src/virtualenv.rs +++ b/crates/uv-virtualenv/src/virtualenv.rs @@ -262,10 +262,10 @@ pub(crate) fn create( if cfg!(windows) { if using_minor_version_link { let 
target = scripts.join(WindowsExecutable::Python.exe(interpreter)); - create_link_to_executable(target.as_path(), executable_target.clone()) + create_link_to_executable(target.as_path(), &executable_target) .map_err(Error::Python)?; let targetw = scripts.join(WindowsExecutable::Pythonw.exe(interpreter)); - create_link_to_executable(targetw.as_path(), executable_target) + create_link_to_executable(targetw.as_path(), &executable_target) .map_err(Error::Python)?; } else { // Always copy `python.exe`. diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index 37d6a6777..e54c44424 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -768,7 +768,7 @@ fn create_bin_links( installation.executable(false) }; - match create_link_to_executable(&target, executable.clone()) { + match create_link_to_executable(&target, &executable) { Ok(()) => { debug!( "Installed executable at `{}` for {}", @@ -925,7 +925,7 @@ fn create_bin_links( .remove(&target); } - if let Err(err) = create_link_to_executable(&target, executable) { + if let Err(err) = create_link_to_executable(&target, &executable) { errors.push(( InstallErrorKind::Bin, installation.key().clone(), @@ -953,7 +953,7 @@ fn create_bin_links( errors.push(( InstallErrorKind::Bin, installation.key().clone(), - anyhow::Error::new(err), + Error::new(err), )); } } From f3dc457d2a96fb27b88c7cfafe97b7f04ffe78aa Mon Sep 17 00:00:00 2001 From: konsti Date: Mon, 21 Jul 2025 18:21:46 +0200 Subject: [PATCH 123/130] Introduce a generic type for list operations (#14792) We currently have two marker keys that a list, `extras` and `dependency_groups`, both from PEP 751. With the variants PEP, we will add three more. This change is broken out of the wheel variants PR to introduce generic marker list support, plus a change to use `ContainerOperator` in more places. 
--- crates/uv-pep508/src/lib.rs | 4 +- crates/uv-pep508/src/marker/algebra.rs | 83 ++--- crates/uv-pep508/src/marker/lowering.rs | 41 +-- crates/uv-pep508/src/marker/mod.rs | 4 +- crates/uv-pep508/src/marker/parse.rs | 155 ++++---- crates/uv-pep508/src/marker/simplify.rs | 77 ++-- crates/uv-pep508/src/marker/tree.rs | 375 +++++++------------- crates/uv-pep508/src/verbatim_url.rs | 1 + crates/uv-resolver/src/marker.rs | 7 +- crates/uv-resolver/src/resolution/output.rs | 7 +- 10 files changed, 276 insertions(+), 478 deletions(-) diff --git a/crates/uv-pep508/src/lib.rs b/crates/uv-pep508/src/lib.rs index f63d46206..10e4142e7 100644 --- a/crates/uv-pep508/src/lib.rs +++ b/crates/uv-pep508/src/lib.rs @@ -32,8 +32,8 @@ pub use marker::{ CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion, ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerEnvironment, MarkerEnvironmentBuilder, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeContents, - MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion, - MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree, + MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString, + MarkerValueVersion, MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree, }; pub use origin::RequirementOrigin; #[cfg(feature = "non-pep508-extensions")] diff --git a/crates/uv-pep508/src/marker/algebra.rs b/crates/uv-pep508/src/marker/algebra.rs index d1a369491..6b166dbc6 100644 --- a/crates/uv-pep508/src/marker/algebra.rs +++ b/crates/uv-pep508/src/marker/algebra.rs @@ -59,10 +59,10 @@ use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range} use crate::marker::MarkerValueExtra; use crate::marker::lowering::{ - CanonicalMarkerValueDependencyGroup, CanonicalMarkerValueExtra, CanonicalMarkerValueString, + CanonicalMarkerListPair, CanonicalMarkerValueExtra, CanonicalMarkerValueString, 
CanonicalMarkerValueVersion, }; -use crate::marker::tree::{ContainerOperator, MarkerValueDependencyGroup}; +use crate::marker::tree::ContainerOperator; use crate::{ ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion, }; @@ -188,19 +188,19 @@ impl InternerGuard<'_> { MarkerExpression::VersionIn { key, versions, - negated, + operator, } => match key { MarkerValueVersion::ImplementationVersion => ( Variable::Version(CanonicalMarkerValueVersion::ImplementationVersion), - Edges::from_versions(&versions, negated), + Edges::from_versions(&versions, operator), ), MarkerValueVersion::PythonFullVersion => ( Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion), - Edges::from_versions(&versions, negated), + Edges::from_versions(&versions, operator), ), // Normalize `python_version` markers to `python_full_version` nodes. MarkerValueVersion::PythonVersion => { - match Edges::from_python_versions(versions, negated) { + match Edges::from_python_versions(versions, operator) { Ok(edges) => ( Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion), edges, @@ -315,6 +315,10 @@ impl InternerGuard<'_> { }; (Variable::String(key), Edges::from_string(operator, value)) } + MarkerExpression::List { pair, operator } => ( + Variable::List(pair), + Edges::from_bool(operator == ContainerOperator::In), + ), // A variable representing the existence or absence of a particular extra. MarkerExpression::Extra { name: MarkerValueExtra::Extra(extra), @@ -335,48 +339,6 @@ impl InternerGuard<'_> { name: MarkerValueExtra::Arbitrary(_), .. } => return NodeId::FALSE, - // A variable representing the existence or absence of a particular extra, in the - // context of a PEP 751 lockfile. 
- MarkerExpression::Extras { - name: MarkerValueExtra::Extra(extra), - operator: ContainerOperator::In, - } => ( - Variable::Extras(CanonicalMarkerValueExtra::Extra(extra)), - Edges::from_bool(true), - ), - MarkerExpression::Extras { - name: MarkerValueExtra::Extra(extra), - operator: ContainerOperator::NotIn, - } => ( - Variable::Extras(CanonicalMarkerValueExtra::Extra(extra)), - Edges::from_bool(false), - ), - // Invalid `extras` names are always `false`. - MarkerExpression::Extras { - name: MarkerValueExtra::Arbitrary(_), - .. - } => return NodeId::FALSE, - // A variable representing the existence or absence of a particular extra, in the - // context of a PEP 751 lockfile. - MarkerExpression::DependencyGroups { - name: MarkerValueDependencyGroup::Group(group), - operator: ContainerOperator::In, - } => ( - Variable::DependencyGroups(CanonicalMarkerValueDependencyGroup::Group(group)), - Edges::from_bool(true), - ), - MarkerExpression::DependencyGroups { - name: MarkerValueDependencyGroup::Group(group), - operator: ContainerOperator::NotIn, - } => ( - Variable::DependencyGroups(CanonicalMarkerValueDependencyGroup::Group(group)), - Edges::from_bool(false), - ), - // Invalid `dependency_group` names are always `false`. - MarkerExpression::DependencyGroups { - name: MarkerValueDependencyGroup::Arbitrary(_), - .. - } => return NodeId::FALSE, }; self.create_node(var, children) @@ -1090,18 +1052,12 @@ pub(crate) enum Variable { /// We keep extras at the leaves of the tree, so when simplifying extras we can /// trivially remove the leaves without having to reconstruct the entire tree. Extra(CanonicalMarkerValueExtra), - /// A variable representing the existence or absence of a given extra, in the context of a - /// PEP 751 lockfile marker. + /// A variable representing whether a ` in ` or ` not in ` + /// expression, where the key is a list. 
/// - /// We keep extras at the leaves of the tree, so when simplifying extras we can + /// We keep extras and groups at the leaves of the tree, so when simplifying extras we can /// trivially remove the leaves without having to reconstruct the entire tree. - Extras(CanonicalMarkerValueExtra), - /// A variable representing the existence or absence of a given dependency group, in the context of a - /// PEP 751 lockfile marker. - /// - /// We keep groups at the leaves of the tree, so when simplifying groups we can - /// trivially remove the leaves without having to reconstruct the entire tree. - DependencyGroups(CanonicalMarkerValueDependencyGroup), + List(CanonicalMarkerListPair), } impl Variable { @@ -1279,7 +1235,10 @@ impl Edges { /// Returns an [`Edges`] where values in the given range are `true`. /// /// Only for use when the `key` is a `PythonVersion`. Normalizes to `PythonFullVersion`. - fn from_python_versions(versions: Vec, negated: bool) -> Result { + fn from_python_versions( + versions: Vec, + operator: ContainerOperator, + ) -> Result { let mut range: Ranges = versions .into_iter() .map(|version| { @@ -1290,7 +1249,7 @@ impl Edges { .flatten_ok() .collect::, NodeId>>()?; - if negated { + if operator == ContainerOperator::NotIn { range = range.complement(); } @@ -1300,7 +1259,7 @@ impl Edges { } /// Returns an [`Edges`] where values in the given range are `true`. 
- fn from_versions(versions: &[Version], negated: bool) -> Edges { + fn from_versions(versions: &[Version], operator: ContainerOperator) -> Edges { let mut range: Ranges = versions .iter() .map(|version| { @@ -1311,7 +1270,7 @@ impl Edges { }) .collect(); - if negated { + if operator == ContainerOperator::NotIn { range = range.complement(); } diff --git a/crates/uv-pep508/src/marker/lowering.rs b/crates/uv-pep508/src/marker/lowering.rs index dadfeac53..e52669840 100644 --- a/crates/uv-pep508/src/marker/lowering.rs +++ b/crates/uv-pep508/src/marker/lowering.rs @@ -2,7 +2,7 @@ use std::fmt::{Display, Formatter}; use uv_normalize::{ExtraName, GroupName}; -use crate::marker::tree::MarkerValueDependencyGroup; +use crate::marker::tree::MarkerValueList; use crate::{MarkerValueExtra, MarkerValueString, MarkerValueVersion}; /// Those environment markers with a PEP 440 version as value such as `python_version` @@ -161,34 +161,35 @@ impl Display for CanonicalMarkerValueExtra { } } -/// The [`GroupName`] value used in `dependency_group` markers. +/// A key-value pair for ` in ` or ` not in `, where the key is a list. +/// +/// Used for PEP 751 markers. #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub enum CanonicalMarkerValueDependencyGroup { +pub enum CanonicalMarkerListPair { + /// A valid [`ExtraName`]. + Extras(ExtraName), /// A valid [`GroupName`]. - Group(GroupName), + DependencyGroup(GroupName), + /// For leniency, preserve invalid values. + Arbitrary { key: MarkerValueList, value: String }, } -impl CanonicalMarkerValueDependencyGroup { - /// Returns the [`GroupName`] value. - pub fn group(&self) -> &GroupName { +impl CanonicalMarkerListPair { + /// The key (RHS) of the marker expression. + pub(crate) fn key(&self) -> MarkerValueList { match self { - Self::Group(group) => group, + Self::Extras(_) => MarkerValueList::Extras, + Self::DependencyGroup(_) => MarkerValueList::DependencyGroups, + Self::Arbitrary { key, .. 
} => *key, } } -} -impl From for MarkerValueDependencyGroup { - fn from(value: CanonicalMarkerValueDependencyGroup) -> Self { - match value { - CanonicalMarkerValueDependencyGroup::Group(group) => Self::Group(group), - } - } -} - -impl Display for CanonicalMarkerValueDependencyGroup { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + /// The value (LHS) of the marker expression. + pub(crate) fn value(&self) -> String { match self { - Self::Group(group) => group.fmt(f), + Self::Extras(extra) => extra.to_string(), + Self::DependencyGroup(group) => group.to_string(), + Self::Arbitrary { value, .. } => value.clone(), } } } diff --git a/crates/uv-pep508/src/marker/mod.rs b/crates/uv-pep508/src/marker/mod.rs index f5ac7f1da..55d21c69f 100644 --- a/crates/uv-pep508/src/marker/mod.rs +++ b/crates/uv-pep508/src/marker/mod.rs @@ -23,8 +23,8 @@ pub use lowering::{ pub use tree::{ ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeContents, MarkerTreeDebugGraph, MarkerTreeKind, - MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion, MarkerWarningKind, - StringMarkerTree, StringVersion, VersionMarkerTree, + MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString, MarkerValueVersion, + MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree, }; /// `serde` helpers for [`MarkerTree`]. 
diff --git a/crates/uv-pep508/src/marker/parse.rs b/crates/uv-pep508/src/marker/parse.rs index de8bfac72..8e4a39078 100644 --- a/crates/uv-pep508/src/marker/parse.rs +++ b/crates/uv-pep508/src/marker/parse.rs @@ -5,7 +5,8 @@ use uv_pep440::{Version, VersionPattern, VersionSpecifier}; use crate::cursor::Cursor; use crate::marker::MarkerValueExtra; -use crate::marker::tree::{ContainerOperator, MarkerValueContains, MarkerValueDependencyGroup}; +use crate::marker::lowering::CanonicalMarkerListPair; +use crate::marker::tree::{ContainerOperator, MarkerValueList}; use crate::{ ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValue, MarkerValueString, MarkerValueVersion, MarkerWarningKind, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, @@ -169,6 +170,7 @@ pub(crate) fn parse_marker_key_op_value( reporter: &mut impl Reporter, ) -> Result, Pep508Error> { cursor.eat_whitespace(); + let start = cursor.pos(); let l_value = parse_marker_value(cursor, reporter)?; cursor.eat_whitespace(); // "not in" and "in" must be preceded by whitespace. We must already have matched a whitespace @@ -177,6 +179,7 @@ pub(crate) fn parse_marker_key_op_value( let operator = parse_marker_operator(cursor)?; cursor.eat_whitespace(); let r_value = parse_marker_value(cursor, reporter)?; + let len = cursor.pos() - start; // Convert a ` ` expression into its // typed equivalent. @@ -211,7 +214,7 @@ pub(crate) fn parse_marker_key_op_value( MarkerValue::Extra | MarkerValue::MarkerEnvVersion(_) | MarkerValue::MarkerEnvString(_) - | MarkerValue::MarkerEnvContains(_) => { + | MarkerValue::MarkerEnvList(_) => { reporter.report( MarkerWarningKind::MarkerMarkerComparison, "Comparing two markers with each other doesn't make any sense, @@ -239,12 +242,23 @@ pub(crate) fn parse_marker_key_op_value( value, }) } + // `extras in "test"` or `dependency_groups not in "dev"` are invalid. 
+ MarkerValue::MarkerEnvList(key) => { + return Err(Pep508Error { + message: Pep508ErrorSource::String(format!( + "The marker {key} must be on the right hand side of the expression" + )), + start, + len, + input: cursor.to_string(), + }); + } // `extra == '...'` MarkerValue::Extra => { let value = match r_value { MarkerValue::MarkerEnvVersion(_) | MarkerValue::MarkerEnvString(_) - | MarkerValue::MarkerEnvContains(_) + | MarkerValue::MarkerEnvList(_) | MarkerValue::Extra => { reporter.report( MarkerWarningKind::ExtraInvalidComparison, @@ -274,16 +288,56 @@ pub(crate) fn parse_marker_key_op_value( operator: operator.invert(), value: l_string, }), + // `"test" in extras` or `"dev" in dependency_groups` + MarkerValue::MarkerEnvList(key) => { + let operator = + ContainerOperator::from_marker_operator(operator).ok_or_else(|| { + Pep508Error { + message: Pep508ErrorSource::String(format!( + "The operator {operator} is not supported with the marker {key}, only the `in` and `not in` operators are supported" + )), + start, + len, + input: cursor.to_string(), + } + })?; + let pair = match key { + // `'...' in extras` + MarkerValueList::Extras => match ExtraName::from_str(&l_string) { + Ok(name) => CanonicalMarkerListPair::Extras(name), + Err(err) => { + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + format!("Expected extra name (found `{l_string}`): {err}"), + ); + CanonicalMarkerListPair::Arbitrary { + key, + value: l_string.to_string(), + } + } + }, + // `'...' in dependency_groups` + MarkerValueList::DependencyGroups => { + match GroupName::from_str(&l_string) { + Ok(name) => CanonicalMarkerListPair::DependencyGroup(name), + Err(err) => { + reporter.report( + MarkerWarningKind::ExtrasInvalidComparison, + format!("Expected dependency group name (found `{l_string}`): {err}"), + ); + CanonicalMarkerListPair::Arbitrary { + key, + value: l_string.to_string(), + } + } + } + } + }; + + Some(MarkerExpression::List { pair, operator }) + } // `'...' 
== extra` MarkerValue::Extra => parse_extra_expr(operator, &l_string, reporter), - // `'...' in extras` - MarkerValue::MarkerEnvContains(MarkerValueContains::Extras) => { - parse_extras_expr(operator, &l_string, reporter) - } - // `'...' in dependency_groups` - MarkerValue::MarkerEnvContains(MarkerValueContains::DependencyGroups) => { - parse_dependency_groups_expr(operator, &l_string, reporter) - } // `'...' == '...'`, doesn't make much sense MarkerValue::QuotedString(_) => { // Not even pypa/packaging 22.0 supports this @@ -300,16 +354,6 @@ pub(crate) fn parse_marker_key_op_value( } } } - MarkerValue::MarkerEnvContains(key) => { - reporter.report( - MarkerWarningKind::Pep440Error, - format!( - "The `{key}` marker must be used as '...' in {key}' or '... not in {key}', - found `{key} {operator} {r_value}`, will be ignored" - ), - ); - return Ok(None); - } }; Ok(expr) @@ -340,10 +384,7 @@ fn parse_version_in_expr( value: &str, reporter: &mut impl Reporter, ) -> Option { - if !matches!(operator, MarkerOperator::In | MarkerOperator::NotIn) { - return None; - } - let negated = matches!(operator, MarkerOperator::NotIn); + let operator = ContainerOperator::from_marker_operator(operator)?; let mut cursor = Cursor::new(value); let mut versions = Vec::new(); @@ -379,7 +420,7 @@ fn parse_version_in_expr( Some(MarkerExpression::VersionIn { key, versions, - negated, + operator, }) } @@ -519,68 +560,6 @@ fn parse_extra_expr( None } -/// Creates an instance of [`MarkerExpression::Extras`] with the given values, falling back to -/// [`MarkerExpression::Arbitrary`] on failure. 
-fn parse_extras_expr( - operator: MarkerOperator, - value: &str, - reporter: &mut impl Reporter, -) -> Option { - let name = match ExtraName::from_str(value) { - Ok(name) => MarkerValueExtra::Extra(name), - Err(err) => { - reporter.report( - MarkerWarningKind::ExtrasInvalidComparison, - format!("Expected extra name (found `{value}`): {err}"), - ); - MarkerValueExtra::Arbitrary(value.to_string()) - } - }; - - if let Some(operator) = ContainerOperator::from_marker_operator(operator) { - return Some(MarkerExpression::Extras { operator, name }); - } - - reporter.report( - MarkerWarningKind::ExtrasInvalidComparison, - "Comparing `extras` with any operator other than `in` or `not in` is wrong and will be ignored" - .to_string(), - ); - - None -} - -/// Creates an instance of [`MarkerExpression::DependencyGroups`] with the given values, falling -/// back to [`MarkerExpression::Arbitrary`] on failure. -fn parse_dependency_groups_expr( - operator: MarkerOperator, - value: &str, - reporter: &mut impl Reporter, -) -> Option { - let name = match GroupName::from_str(value) { - Ok(name) => MarkerValueDependencyGroup::Group(name), - Err(err) => { - reporter.report( - MarkerWarningKind::ExtrasInvalidComparison, - format!("Expected extra name (found `{value}`): {err}"), - ); - MarkerValueDependencyGroup::Arbitrary(value.to_string()) - } - }; - - if let Some(operator) = ContainerOperator::from_marker_operator(operator) { - return Some(MarkerExpression::DependencyGroups { operator, name }); - } - - reporter.report( - MarkerWarningKind::ExtrasInvalidComparison, - "Comparing `extras` with any operator other than `in` or `not in` is wrong and will be ignored" - .to_string(), - ); - - None -} - /// ```text /// marker_expr = marker_var:l marker_op:o marker_var:r -> (o, l, r) /// | wsp* '(' marker:m wsp* ')' -> m diff --git a/crates/uv-pep508/src/marker/simplify.rs b/crates/uv-pep508/src/marker/simplify.rs index 6897615c4..b1565835b 100644 --- a/crates/uv-pep508/src/marker/simplify.rs +++ 
b/crates/uv-pep508/src/marker/simplify.rs @@ -162,6 +162,22 @@ fn collect_dnf( path.pop(); } } + MarkerTreeKind::List(marker) => { + for (is_high, tree) in marker.children() { + let expr = MarkerExpression::List { + pair: marker.pair().clone(), + operator: if is_high { + ContainerOperator::In + } else { + ContainerOperator::NotIn + }, + }; + + path.push(expr); + collect_dnf(tree, dnf, path); + path.pop(); + } + } MarkerTreeKind::Extra(marker) => { for (value, tree) in marker.children() { let operator = if value { @@ -175,42 +191,6 @@ fn collect_dnf( operator, }; - path.push(expr); - collect_dnf(tree, dnf, path); - path.pop(); - } - } - MarkerTreeKind::Extras(marker) => { - for (value, tree) in marker.children() { - let operator = if value { - ContainerOperator::In - } else { - ContainerOperator::NotIn - }; - - let expr = MarkerExpression::Extras { - name: marker.name().clone().into(), - operator, - }; - - path.push(expr); - collect_dnf(tree, dnf, path); - path.pop(); - } - } - MarkerTreeKind::DependencyGroups(marker) => { - for (value, tree) in marker.children() { - let operator = if value { - ContainerOperator::In - } else { - ContainerOperator::NotIn - }; - - let expr = MarkerExpression::DependencyGroups { - name: marker.name().clone().into(), - operator, - }; - path.push(expr); collect_dnf(tree, dnf, path); path.pop(); @@ -433,18 +413,18 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool { MarkerExpression::VersionIn { key, versions, - negated, + operator, } => { let MarkerExpression::VersionIn { key: key2, versions: versions2, - negated: negated2, + operator: operator2, } = right else { return false; }; - key == key2 && versions == versions2 && negated != negated2 + key == key2 && versions == versions2 && operator != operator2 } MarkerExpression::String { key, @@ -477,27 +457,16 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool { name == name2 && operator.negate() == *operator2 } - MarkerExpression::Extras { 
name, operator } => { - let MarkerExpression::Extras { - name: name2, + MarkerExpression::List { pair, operator } => { + let MarkerExpression::List { + pair: pair2, operator: operator2, } = right else { return false; }; - name == name2 && *operator == operator2.negate() - } - MarkerExpression::DependencyGroups { name, operator } => { - let MarkerExpression::DependencyGroups { - name: name2, - operator: operator2, - } = right - else { - return false; - }; - - name == name2 && *operator == operator2.negate() + pair == pair2 && operator != operator2 } } } diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 756c90ade..f874fd447 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -16,12 +16,12 @@ use super::algebra::{Edges, INTERNER, NodeId, Variable}; use super::simplify; use crate::cursor::Cursor; use crate::marker::lowering::{ - CanonicalMarkerValueDependencyGroup, CanonicalMarkerValueExtra, CanonicalMarkerValueString, - CanonicalMarkerValueVersion, + CanonicalMarkerListPair, CanonicalMarkerValueString, CanonicalMarkerValueVersion, }; use crate::marker::parse; use crate::{ - MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, TracingReporter, + CanonicalMarkerValueExtra, MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url, + Reporter, TracingReporter, }; /// Ways in which marker evaluation can fail @@ -126,16 +126,18 @@ impl Display for MarkerValueString { } } -/// Those markers with exclusively `in` and `not in` operators (PEP 751) -#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub enum MarkerValueContains { +/// Those markers with exclusively `in` and `not in` operators. +/// +/// Contains PEP 751 lockfile markers. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub enum MarkerValueList { /// `extras`. This one is special because it's a list, and user-provided Extras, /// `dependency_groups`. 
This one is special because it's a list, and user-provided DependencyGroups, } -impl Display for MarkerValueContains { +impl Display for MarkerValueList { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Self::Extras => f.write_str("extras"), @@ -153,10 +155,10 @@ pub enum MarkerValue { MarkerEnvVersion(MarkerValueVersion), /// Those environment markers with an arbitrary string as value such as `sys_platform` MarkerEnvString(MarkerValueString), + /// Those markers with exclusively `in` and `not in` operators + MarkerEnvList(MarkerValueList), /// `extra`. This one is special because it's a list, and user-provided Extra, - /// Those markers with exclusively `in` and `not in` operators (PEP 751) - MarkerEnvContains(MarkerValueContains), /// Not a constant, but a user given quoted string with a value inside such as '3.8' or "windows" QuotedString(ArcStr), } @@ -196,9 +198,9 @@ impl FromStr for MarkerValue { "python_version" => Self::MarkerEnvVersion(MarkerValueVersion::PythonVersion), "sys_platform" => Self::MarkerEnvString(MarkerValueString::SysPlatform), "sys.platform" => Self::MarkerEnvString(MarkerValueString::SysPlatformDeprecated), + "extras" => Self::MarkerEnvList(MarkerValueList::Extras), + "dependency_groups" => Self::MarkerEnvList(MarkerValueList::DependencyGroups), "extra" => Self::Extra, - "extras" => Self::MarkerEnvContains(MarkerValueContains::Extras), - "dependency_groups" => Self::MarkerEnvContains(MarkerValueContains::DependencyGroups), _ => return Err(format!("Invalid key: {s}")), }; Ok(value) @@ -210,8 +212,8 @@ impl Display for MarkerValue { match self { Self::MarkerEnvVersion(marker_value_version) => marker_value_version.fmt(f), Self::MarkerEnvString(marker_value_string) => marker_value_string.fmt(f), + Self::MarkerEnvList(marker_value_contains) => marker_value_contains.fmt(f), Self::Extra => f.write_str("extra"), - Self::MarkerEnvContains(marker_value_contains) => marker_value_contains.fmt(f), Self::QuotedString(value) => 
write!(f, "'{value}'"), } } @@ -499,24 +501,6 @@ impl Display for MarkerValueExtra { } } -/// The [`GroupName`] value used in `dependency_group` markers. -#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub enum MarkerValueDependencyGroup { - /// A valid [`GroupName`]. - Group(GroupName), - /// An invalid name, preserved as an arbitrary string. - Arbitrary(String), -} - -impl Display for MarkerValueDependencyGroup { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self { - Self::Group(group) => group.fmt(f), - Self::Arbitrary(string) => string.fmt(f), - } - } -} - /// Represents one clause such as `python_version > "3.8"`. #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] #[allow(missing_docs)] @@ -540,7 +524,7 @@ pub enum MarkerExpression { VersionIn { key: MarkerValueVersion, versions: Vec, - negated: bool, + operator: ContainerOperator, }, /// An string marker comparison, e.g. `sys_platform == '...'`. /// @@ -550,21 +534,16 @@ pub enum MarkerExpression { operator: MarkerOperator, value: ArcStr, }, + /// `'...' in `, a PEP 751 expression. + List { + pair: CanonicalMarkerListPair, + operator: ContainerOperator, + }, /// `extra '...'` or `'...' extra`. Extra { name: MarkerValueExtra, operator: ExtraOperator, }, - /// `'...' in extras` - Extras { - name: MarkerValueExtra, - operator: ContainerOperator, - }, - /// `'...' in dependency_groups` - DependencyGroups { - name: MarkerValueDependencyGroup, - operator: ContainerOperator, - }, } /// The kind of a [`MarkerExpression`]. @@ -572,16 +551,14 @@ pub enum MarkerExpression { pub(crate) enum MarkerExpressionKind { /// A version expression, e.g. ` `. Version(MarkerValueVersion), - /// A version "in" expression, e.g. ` in `. + /// A version `in` expression, e.g. ` in `. VersionIn(MarkerValueVersion), /// A string marker comparison, e.g. `sys_platform == '...'`. String(MarkerValueString), + /// A list `in` or `not in` expression, e.g. `'...' in dependency_groups`. 
+ List(MarkerValueList), /// An extra expression, e.g. `extra == '...'`. Extra, - /// An extras expression, e.g. `'...' in extras`. - Extras, - /// A dependency groups expression, e.g. `'...' in dependency_groups`. - DependencyGroups, } /// The operator for an extra expression, either '==' or '!='. @@ -624,7 +601,7 @@ impl Display for ExtraOperator { } /// The operator for a container expression, either 'in' or 'not in'. -#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] pub enum ContainerOperator { /// `in` In, @@ -643,14 +620,6 @@ impl ContainerOperator { _ => None, } } - - /// Negates this operator. - pub(crate) fn negate(&self) -> ContainerOperator { - match *self { - ContainerOperator::In => ContainerOperator::NotIn, - ContainerOperator::NotIn => ContainerOperator::In, - } - } } impl Display for ContainerOperator { @@ -700,9 +669,8 @@ impl MarkerExpression { MarkerExpression::Version { key, .. } => MarkerExpressionKind::Version(*key), MarkerExpression::VersionIn { key, .. } => MarkerExpressionKind::VersionIn(*key), MarkerExpression::String { key, .. } => MarkerExpressionKind::String(*key), + MarkerExpression::List { pair, .. } => MarkerExpressionKind::List(pair.key()), MarkerExpression::Extra { .. } => MarkerExpressionKind::Extra, - MarkerExpression::Extras { .. } => MarkerExpressionKind::Extras, - MarkerExpression::DependencyGroups { .. 
} => MarkerExpressionKind::DependencyGroups, } } } @@ -721,11 +689,10 @@ impl Display for MarkerExpression { MarkerExpression::VersionIn { key, versions, - negated, + operator, } => { - let op = if *negated { "not in" } else { "in" }; let versions = versions.iter().map(ToString::to_string).join(" "); - write!(f, "{key} {op} '{versions}'") + write!(f, "{key} {operator} '{versions}'") } MarkerExpression::String { key, @@ -741,15 +708,12 @@ impl Display for MarkerExpression { write!(f, "{key} {operator} '{value}'") } + MarkerExpression::List { pair, operator } => { + write!(f, "'{}' {} {}", pair.value(), operator, pair.key()) + } MarkerExpression::Extra { operator, name } => { write!(f, "extra {operator} '{name}'") } - MarkerExpression::Extras { operator, name } => { - write!(f, "'{name}' {operator} extras") - } - MarkerExpression::DependencyGroups { operator, name } => { - write!(f, "'{name}' {operator} dependency_groups") - } } } } @@ -777,24 +741,24 @@ impl<'a> ExtrasEnvironment<'a> { /// Returns the `extra` names in this environment. fn extra(&self) -> &[ExtraName] { match self { - ExtrasEnvironment::Extras(extra) => extra, - ExtrasEnvironment::Pep751(..) => &[], + Self::Extras(extra) => extra, + Self::Pep751(..) => &[], } } /// Returns the `extras` names in this environment, as in a PEP 751 lockfile. fn extras(&self) -> &[ExtraName] { match self { - ExtrasEnvironment::Extras(..) => &[], - ExtrasEnvironment::Pep751(extras, ..) => extras, + Self::Extras(..) => &[], + Self::Pep751(extras, ..) => extras, } } /// Returns the `dependency_group` group names in this environment, as in a PEP 751 lockfile. fn dependency_groups(&self) -> &[GroupName] { match self { - ExtrasEnvironment::Extras(..) => &[], - ExtrasEnvironment::Pep751(.., groups) => groups, + Self::Extras(..) 
=> &[], + Self::Pep751(.., groups) => groups, } } } @@ -1006,6 +970,16 @@ impl MarkerTree { low: low.negate(self.0), }) } + Variable::List(key) => { + let Edges::Boolean { low, high } = node.children else { + unreachable!() + }; + MarkerTreeKind::List(ListMarkerTree { + pair: key, + high: high.negate(self.0), + low: low.negate(self.0), + }) + } Variable::Extra(name) => { let Edges::Boolean { low, high } = node.children else { unreachable!() @@ -1016,26 +990,6 @@ impl MarkerTree { low: low.negate(self.0), }) } - Variable::Extras(name) => { - let Edges::Boolean { low, high } = node.children else { - unreachable!() - }; - MarkerTreeKind::Extras(ExtrasMarkerTree { - name, - high: high.negate(self.0), - low: low.negate(self.0), - }) - } - Variable::DependencyGroups(name) => { - let Edges::Boolean { low, high } = node.children else { - unreachable!() - }; - MarkerTreeKind::DependencyGroups(DependencyGroupsMarkerTree { - name, - high: high.negate(self.0), - low: low.negate(self.0), - }) - } } } @@ -1160,14 +1114,18 @@ impl MarkerTree { .edge(extras.extra().contains(marker.name().extra())) .evaluate_reporter_impl(env, extras, reporter); } - MarkerTreeKind::Extras(marker) => { + MarkerTreeKind::List(marker) => { + let edge = match marker.pair() { + CanonicalMarkerListPair::Extras(extra) => extras.extras().contains(extra), + CanonicalMarkerListPair::DependencyGroup(dependency_group) => { + extras.dependency_groups().contains(dependency_group) + } + // Invalid marker expression + CanonicalMarkerListPair::Arbitrary { .. 
} => return false, + }; + return marker - .edge(extras.extras().contains(marker.name().extra())) - .evaluate_reporter_impl(env, extras, reporter); - } - MarkerTreeKind::DependencyGroups(marker) => { - return marker - .edge(extras.dependency_groups().contains(marker.name().group())) + .edge(edge) .evaluate_reporter_impl(env, extras, reporter); } } @@ -1194,15 +1152,12 @@ impl MarkerTree { MarkerTreeKind::Contains(marker) => marker .children() .any(|(_, tree)| tree.evaluate_extras(extras)), + MarkerTreeKind::List(marker) => marker + .children() + .any(|(_, tree)| tree.evaluate_extras(extras)), MarkerTreeKind::Extra(marker) => marker .edge(extras.contains(marker.name().extra())) .evaluate_extras(extras), - MarkerTreeKind::Extras(marker) => marker - .children() - .any(|(_, tree)| tree.evaluate_extras(extras)), - MarkerTreeKind::DependencyGroups(marker) => marker - .children() - .any(|(_, tree)| tree.evaluate_extras(extras)), } } @@ -1430,6 +1385,11 @@ impl MarkerTree { imp(tree, f); } } + MarkerTreeKind::List(kind) => { + for (_, tree) in kind.children() { + imp(tree, f); + } + } MarkerTreeKind::Extra(kind) => { if kind.low.is_false() { f(MarkerOperator::Equal, kind.name().extra()); @@ -1440,16 +1400,6 @@ impl MarkerTree { imp(tree, f); } } - MarkerTreeKind::Extras(kind) => { - for (_, tree) in kind.children() { - imp(tree, f); - } - } - MarkerTreeKind::DependencyGroups(kind) => { - for (_, tree) in kind.children() { - imp(tree, f); - } - } } } imp(self, &mut f); @@ -1557,6 +1507,21 @@ impl MarkerTree { write!(f, "{} not in {} -> ", kind.value(), kind.key())?; kind.edge(false).fmt_graph(f, level + 1)?; } + MarkerTreeKind::List(kind) => { + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} in {} -> ", kind.value(), kind.key())?; + kind.edge(true).fmt_graph(f, level + 1)?; + + writeln!(f)?; + for _ in 0..level { + write!(f, " ")?; + } + write!(f, "{} not in {} -> ", kind.value(), kind.key())?; + kind.edge(false).fmt_graph(f, level + 1)?; + } 
MarkerTreeKind::Extra(kind) => { writeln!(f)?; for _ in 0..level { @@ -1572,36 +1537,6 @@ impl MarkerTree { write!(f, "extra != {} -> ", kind.name())?; kind.edge(false).fmt_graph(f, level + 1)?; } - MarkerTreeKind::Extras(kind) => { - writeln!(f)?; - for _ in 0..level { - write!(f, " ")?; - } - write!(f, "{} in extras -> ", kind.name())?; - kind.edge(true).fmt_graph(f, level + 1)?; - - writeln!(f)?; - for _ in 0..level { - write!(f, " ")?; - } - write!(f, "{} not in extras -> ", kind.name())?; - kind.edge(false).fmt_graph(f, level + 1)?; - } - MarkerTreeKind::DependencyGroups(kind) => { - writeln!(f)?; - for _ in 0..level { - write!(f, " ")?; - } - write!(f, "{} in dependency_groups -> ", kind.name())?; - kind.edge(true).fmt_graph(f, level + 1)?; - - writeln!(f)?; - for _ in 0..level { - write!(f, " ")?; - } - write!(f, "{} not in dependency_groups -> ", kind.name())?; - kind.edge(false).fmt_graph(f, level + 1)?; - } } Ok(()) @@ -1671,12 +1606,10 @@ pub enum MarkerTreeKind<'a> { In(InMarkerTree<'a>), /// A string expression with the `contains` operator. Contains(ContainsMarkerTree<'a>), - /// A string expression (e.g., `extra == 'dev'`). + /// A `in` or `not in` expression. + List(ListMarkerTree<'a>), + /// An extra expression (e.g., `extra == 'dev'`). Extra(ExtraMarkerTree<'a>), - /// A string expression (e.g., `'dev' in extras`). - Extras(ExtrasMarkerTree<'a>), - /// A string expression (e.g., `'dev' in dependency_groups`). - DependencyGroups(DependencyGroupsMarkerTree<'a>), } /// A version marker node, such as `python_version < '3.7'`. @@ -1851,6 +1784,59 @@ impl Ord for ContainsMarkerTree<'_> { } } +#[derive(PartialEq, Eq, Clone, Debug)] +pub struct ListMarkerTree<'a> { + // No separate canonical type, the type is already canonical. 
+ pair: &'a CanonicalMarkerListPair, + high: NodeId, + low: NodeId, +} + +impl ListMarkerTree<'_> { + /// The key-value pair for this expression + pub fn pair(&self) -> &CanonicalMarkerListPair { + self.pair + } + + /// The key (RHS) for this expression. + pub fn key(&self) -> MarkerValueList { + self.pair.key() + } + + /// The value (LHS) for this expression. + pub fn value(&self) -> String { + self.pair.value() + } + + /// The edges of this node, corresponding to the boolean evaluation of the expression. + pub fn children(&self) -> impl Iterator { + [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter() + } + + /// Returns the subtree associated with the given edge value. + pub fn edge(&self, value: bool) -> MarkerTree { + if value { + MarkerTree(self.high) + } else { + MarkerTree(self.low) + } + } +} + +impl PartialOrd for ListMarkerTree<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for ListMarkerTree<'_> { + fn cmp(&self, other: &Self) -> Ordering { + self.pair() + .cmp(other.pair()) + .then_with(|| self.children().cmp(other.children())) + } +} + /// A node representing the existence or absence of a given extra, such as `extra == 'bar'`. #[derive(PartialEq, Eq, Clone, Debug)] pub struct ExtraMarkerTree<'a> { @@ -1894,93 +1880,6 @@ impl Ord for ExtraMarkerTree<'_> { } } -/// A node representing the existence or absence of a given extra, such as `'bar' in extras`. -#[derive(PartialEq, Eq, Clone, Debug)] -pub struct ExtrasMarkerTree<'a> { - name: &'a CanonicalMarkerValueExtra, - high: NodeId, - low: NodeId, -} - -impl ExtrasMarkerTree<'_> { - /// Returns the name of the extra in this expression. - pub fn name(&self) -> &CanonicalMarkerValueExtra { - self.name - } - - /// The edges of this node, corresponding to the boolean evaluation of the expression. 
- pub fn children(&self) -> impl Iterator { - [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter() - } - - /// Returns the subtree associated with the given edge value. - pub fn edge(&self, value: bool) -> MarkerTree { - if value { - MarkerTree(self.high) - } else { - MarkerTree(self.low) - } - } -} - -impl PartialOrd for ExtrasMarkerTree<'_> { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for ExtrasMarkerTree<'_> { - fn cmp(&self, other: &Self) -> Ordering { - self.name() - .cmp(other.name()) - .then_with(|| self.children().cmp(other.children())) - } -} - -/// A node representing the existence or absence of a given dependency group, such as -/// `'bar' in dependency_groups`. -#[derive(PartialEq, Eq, Clone, Debug)] -pub struct DependencyGroupsMarkerTree<'a> { - name: &'a CanonicalMarkerValueDependencyGroup, - high: NodeId, - low: NodeId, -} - -impl DependencyGroupsMarkerTree<'_> { - /// Returns the name of the group in this expression. - pub fn name(&self) -> &CanonicalMarkerValueDependencyGroup { - self.name - } - - /// The edges of this node, corresponding to the boolean evaluation of the expression. - pub fn children(&self) -> impl Iterator { - [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter() - } - - /// Returns the subtree associated with the given edge value. - pub fn edge(&self, value: bool) -> MarkerTree { - if value { - MarkerTree(self.high) - } else { - MarkerTree(self.low) - } - } -} - -impl PartialOrd for DependencyGroupsMarkerTree<'_> { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for DependencyGroupsMarkerTree<'_> { - fn cmp(&self, other: &Self) -> Ordering { - self.name() - .cmp(other.name()) - .then_with(|| self.children().cmp(other.children())) - } -} - /// A marker tree that contains at least one expression. /// /// See [`MarkerTree::contents`] for details. 
@@ -2090,7 +1989,7 @@ mod test { implementation_name: "", implementation_version: "3.7", os_name: "linux", - platform_machine: "", + platform_machine: "x86_64", platform_python_implementation: "", platform_release: "", platform_system: "", diff --git a/crates/uv-pep508/src/verbatim_url.rs b/crates/uv-pep508/src/verbatim_url.rs index 2911de938..9f0e9a5ee 100644 --- a/crates/uv-pep508/src/verbatim_url.rs +++ b/crates/uv-pep508/src/verbatim_url.rs @@ -62,6 +62,7 @@ impl VerbatimUrl { /// /// If no root directory is provided, relative paths are resolved against the current working /// directory. + #[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs. pub fn from_url_or_path( input: &str, root_dir: Option<&Path>, diff --git a/crates/uv-resolver/src/marker.rs b/crates/uv-resolver/src/marker.rs index 5a2203f9b..02ea1d6df 100644 --- a/crates/uv-resolver/src/marker.rs +++ b/crates/uv-resolver/src/marker.rs @@ -54,12 +54,7 @@ pub(crate) fn requires_python(tree: MarkerTree) -> Option { collect_python_markers(tree, markers, range); } } - MarkerTreeKind::Extras(marker) => { - for (_, tree) in marker.children() { - collect_python_markers(tree, markers, range); - } - } - MarkerTreeKind::DependencyGroups(marker) => { + MarkerTreeKind::List(marker) => { for (_, tree) in marker.children() { collect_python_markers(tree, markers, range); } diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs index 2afbf2c6b..8df52f4f0 100644 --- a/crates/uv-resolver/src/resolution/output.rs +++ b/crates/uv-resolver/src/resolution/output.rs @@ -698,12 +698,7 @@ impl ResolverOutput { add_marker_params_from_tree(tree, set); } } - MarkerTreeKind::Extras(marker) => { - for (_, tree) in marker.children() { - add_marker_params_from_tree(tree, set); - } - } - MarkerTreeKind::DependencyGroups(marker) => { + MarkerTreeKind::List(marker) => { for (_, tree) in marker.children() { add_marker_params_from_tree(tree, set); } 
From 2c8e394f03030542992d0a2475f2a6a063420488 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 12:25:50 -0400 Subject: [PATCH 124/130] Create (e.g.) `python3.13t` executables in `uv venv` (#14764) ## Summary CPython's `venv` module creates these, so we should too. On non-Windows, we add `python3.13t`. On Windows, we add `python3.13t.exe` and `pythonw3.13t.exe` (see: https://github.com/python/cpython/blob/65d2c51c10425dcfacc0a13810d58c41240d7ff9/Lib/venv/__init__.py#L362). Closes https://github.com/astral-sh/uv/issues/14760. --- crates/uv-virtualenv/src/virtualenv.rs | 58 +++++++++++++++++++++++- crates/uv/tests/it/python_install.rs | 63 +++++++++++++++++++++++++- 2 files changed, 118 insertions(+), 3 deletions(-) diff --git a/crates/uv-virtualenv/src/virtualenv.rs b/crates/uv-virtualenv/src/virtualenv.rs index 5d3ab4a88..fb22a0724 100644 --- a/crates/uv-virtualenv/src/virtualenv.rs +++ b/crates/uv-virtualenv/src/virtualenv.rs @@ -242,6 +242,16 @@ pub(crate) fn create( interpreter.python_minor(), )), )?; + if interpreter.gil_disabled() { + uv_fs::replace_symlink( + "python", + scripts.join(format!( + "python{}.{}t", + interpreter.python_major(), + interpreter.python_minor(), + )), + )?; + } if interpreter.markers().implementation_name() == "pypy" { uv_fs::replace_symlink( @@ -267,6 +277,14 @@ pub(crate) fn create( let targetw = scripts.join(WindowsExecutable::Pythonw.exe(interpreter)); create_link_to_executable(targetw.as_path(), &executable_target) .map_err(Error::Python)?; + if interpreter.gil_disabled() { + let targett = scripts.join(WindowsExecutable::PythonMajorMinort.exe(interpreter)); + create_link_to_executable(targett.as_path(), &executable_target) + .map_err(Error::Python)?; + let targetwt = scripts.join(WindowsExecutable::PythonwMajorMinort.exe(interpreter)); + create_link_to_executable(targetwt.as_path(), &executable_target) + .map_err(Error::Python)?; + } } else { // Always copy `python.exe`. 
copy_launcher_windows( @@ -363,6 +381,24 @@ pub(crate) fn create( &scripts, python_home, )?; + + // If the GIL is disabled, copy `venvlaunchert.exe` and `venvwlaunchert.exe`. + if interpreter.gil_disabled() { + copy_launcher_windows( + WindowsExecutable::PythonMajorMinort, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonwMajorMinort, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } } } } @@ -580,8 +616,12 @@ enum WindowsExecutable { PythonMajor, /// The `python3..exe` executable (or `venvlauncher.exe` launcher shim). PythonMajorMinor, + /// The `python3.t.exe` executable (or `venvlaunchert.exe` launcher shim). + PythonMajorMinort, /// The `pythonw.exe` executable (or `venvwlauncher.exe` launcher shim). Pythonw, + /// The `pythonw3.t.exe` executable (or `venvwlaunchert.exe` launcher shim). + PythonwMajorMinort, /// The `pypy.exe` executable. PyPy, /// The `pypy3.exe` executable. @@ -592,7 +632,7 @@ enum WindowsExecutable { PyPyw, /// The `pypy3.w.exe` executable. PyPyMajorMinorw, - // The `graalpy.exe` executable + /// The `graalpy.exe` executable. 
GraalPy, } @@ -611,7 +651,21 @@ impl WindowsExecutable { interpreter.python_minor() ) } + WindowsExecutable::PythonMajorMinort => { + format!( + "python{}.{}t.exe", + interpreter.python_major(), + interpreter.python_minor() + ) + } WindowsExecutable::Pythonw => String::from("pythonw.exe"), + WindowsExecutable::PythonwMajorMinort => { + format!( + "pythonw{}.{}t.exe", + interpreter.python_major(), + interpreter.python_minor() + ) + } WindowsExecutable::PyPy => String::from("pypy.exe"), WindowsExecutable::PyPyMajor => { format!("pypy{}.exe", interpreter.python_major()) @@ -646,6 +700,8 @@ impl WindowsExecutable { Self::Python | Self::PythonMajor | Self::PythonMajorMinor => "venvlauncher.exe", Self::Pythonw if interpreter.gil_disabled() => "venvwlaunchert.exe", Self::Pythonw => "venvwlauncher.exe", + Self::PythonMajorMinort => "venvlaunchert.exe", + Self::PythonwMajorMinort => "venvwlaunchert.exe", // From 3.13 on these should replace the `python.exe` and `pythonw.exe` shims. // These are not relevant as of now for PyPy as it doesn't yet support Python 3.13. 
Self::PyPy | Self::PyPyMajor | Self::PyPyMajorMinor => "venvlauncher.exe", diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index 51e394aad..19b3a5c7f 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -1087,6 +1087,65 @@ fn python_install_freethreaded() { ----- stderr ----- "###); + // Create a virtual environment with the freethreaded Python + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.13t"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.13.5 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + // `python`, `python3`, `python3.13`, and `python3.13t` should all be present + let scripts = context + .venv + .join(if cfg!(windows) { "Scripts" } else { "bin" }); + assert!( + scripts + .join(format!("python{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + #[cfg(windows)] + assert!( + scripts + .join(format!("pythonw{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + #[cfg(unix)] + assert!( + scripts + .join(format!("python3{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + #[cfg(unix)] + assert!( + scripts + .join(format!("python3.13{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + assert!( + scripts + .join(format!("python3.13t{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + #[cfg(windows)] + assert!( + scripts + .join(format!("pythonw3.13t{}", std::env::consts::EXE_SUFFIX)) + .exists() + ); + + // Remove the virtual environment + fs_err::remove_dir_all(&context.venv).unwrap(); + // Should be distinct from 3.13 uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r" success: true @@ -1099,14 +1158,14 @@ fn python_install_freethreaded() { "); // Should not work with older Python versions - uv_snapshot!(context.filters(), context.python_install().arg("3.12t"), @r###" + uv_snapshot!(context.filters(), 
context.python_install().arg("3.12t"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: No download found for request: cpython-3.12t-[PLATFORM] - "###); + "); uv_snapshot!(context.filters(), context.python_uninstall().arg("--all"), @r" success: true From 7a56950bab323ab8f77f211e1d2522f2e3ead2ba Mon Sep 17 00:00:00 2001 From: Ali Chaudry Date: Mon, 21 Jul 2025 15:48:47 -0400 Subject: [PATCH 125/130] Update `setup-uv` docs for Github Actions integration guide (re-order python and uv setup) (#14741) I updated the Github Actions integration guide to run Github's `setup-python` before Astral's `setup-uv`, as `setup-uv`'s `activate-environment: true` doesn't work with the original ordering. There is a discussion about this behavior in the `setup-uv` repo [here](https://github.com/astral-sh/setup-uv/issues/479). ## Summary Update the documentation for the Github Actions integration. Caveat: I'm unsure if there are any other reasons where the original ordering (that is,`setup-uv` before `setup-python`) might be preferred. ## Test Plan Tested in a private Github Actions push, as documented in the aforementioned discussion on `setup-uv`'s repo. Confirmed that removing `source .venv/bin/activate` and replacing it with `activate-environment: true` now works in this ordering (but didn't work with the original ordering where `uv` installs before Github's `python`). 
--- docs/guides/integration/github.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index 15d26b280..220db8fc8 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -92,13 +92,13 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Install uv - uses: astral-sh/setup-uv@v6 - - name: "Set up Python" uses: actions/setup-python@v5 with: python-version-file: ".python-version" + + - name: Install uv + uses: astral-sh/setup-uv@v6 ``` Or, specify the `pyproject.toml` file to ignore the pin and use the latest version compatible with @@ -115,13 +115,13 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Install uv - uses: astral-sh/setup-uv@v6 - - name: "Set up Python" uses: actions/setup-python@v5 with: python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v6 ``` ## Multiple Python versions From a3ea1b69f28dc4a0ab1fee400e7509d1fa3bf136 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 16:55:33 -0400 Subject: [PATCH 126/130] Add support for `HF_TOKEN` (#14797) ## Summary If `HF_TOKEN` is set, we'll automatically wire it up to authenticate requests when hitting private `huggingface.co` URLs in `uv run`. ## Test Plan An unauthenticated request: ``` > cargo run -- run https://huggingface.co/datasets/cmarsh/test/resolve/main/main.py File "/var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/mainYadr5M.py", line 1 Invalid username or password. ^^^^^^^^ SyntaxError: invalid syntax ``` An authenticated request: ``` > HF_TOKEN=hf_... cargo run run https://huggingface.co/datasets/cmarsh/test/resolve/main/main.py Hello from main.py! 
``` --- crates/uv-auth/src/lib.rs | 1 + crates/uv-auth/src/middleware.rs | 13 +++++++-- crates/uv-auth/src/providers.rs | 49 ++++++++++++++++++++++++++++++++ crates/uv-static/src/env_vars.rs | 7 +++++ docs/concepts/authentication.md | 15 ++++++++++ docs/reference/environment.md | 9 ++++++ 6 files changed, 91 insertions(+), 3 deletions(-) create mode 100644 crates/uv-auth/src/providers.rs diff --git a/crates/uv-auth/src/lib.rs b/crates/uv-auth/src/lib.rs index 90a957630..8e8a0e057 100644 --- a/crates/uv-auth/src/lib.rs +++ b/crates/uv-auth/src/lib.rs @@ -15,6 +15,7 @@ mod credentials; mod index; mod keyring; mod middleware; +mod providers; mod realm; // TODO(zanieb): Consider passing a cache explicitly throughout diff --git a/crates/uv-auth/src/middleware.rs b/crates/uv-auth/src/middleware.rs index 1842effb3..605675b61 100644 --- a/crates/uv-auth/src/middleware.rs +++ b/crates/uv-auth/src/middleware.rs @@ -7,6 +7,7 @@ use reqwest::{Request, Response}; use reqwest_middleware::{Error, Middleware, Next}; use tracing::{debug, trace, warn}; +use crate::providers::HuggingFaceProvider; use crate::{ CREDENTIALS_CACHE, CredentialsCache, KeyringProvider, cache::FetchUrl, @@ -457,9 +458,8 @@ impl AuthMiddleware { Some(credentials) }; - return self - .complete_request(credentials, request, extensions, next, auth_policy) - .await; + self.complete_request(credentials, request, extensions, next, auth_policy) + .await } /// Fetch credentials for a URL. @@ -503,6 +503,13 @@ impl AuthMiddleware { return credentials; } + // Support for known providers, like Hugging Face. + if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) { + debug!("Found Hugging Face credentials for {url}"); + self.cache().fetches.done(key, Some(credentials.clone())); + return Some(credentials); + } + // Netrc support based on: . 
let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| { debug!("Checking netrc for credentials for {url}"); diff --git a/crates/uv-auth/src/providers.rs b/crates/uv-auth/src/providers.rs new file mode 100644 index 000000000..85a5a0ec7 --- /dev/null +++ b/crates/uv-auth/src/providers.rs @@ -0,0 +1,49 @@ +use std::sync::LazyLock; +use tracing::debug; +use url::Url; + +use uv_static::EnvVars; + +use crate::Credentials; +use crate::realm::Realm; + +/// The [`Realm`] for the Hugging Face platform. +static HUGGING_FACE_REALM: LazyLock = LazyLock::new(|| { + let url = Url::parse("https://huggingface.co").expect("Failed to parse Hugging Face URL"); + Realm::from(&url) +}); + +/// The authentication token for the Hugging Face platform, if set. +static HUGGING_FACE_TOKEN: LazyLock>> = LazyLock::new(|| { + // Extract the Hugging Face token from the environment variable, if it exists. + let hf_token = std::env::var(EnvVars::HF_TOKEN) + .ok() + .map(String::into_bytes) + .filter(|token| !token.is_empty())?; + + if std::env::var_os(EnvVars::UV_NO_HF_TOKEN).is_some() { + debug!("Ignoring Hugging Face token from environment due to `UV_NO_HF_TOKEN`"); + return None; + } + + debug!("Found Hugging Face token in environment"); + Some(hf_token) +}); + +/// A provider for authentication credentials for the Hugging Face platform. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct HuggingFaceProvider; + +impl HuggingFaceProvider { + /// Returns the credentials for the Hugging Face platform, if available. 
+    pub(crate) fn credentials_for(url: &Url) -> Option {
+        if Realm::from(url) == *HUGGING_FACE_REALM {
+            if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
+                return Some(Credentials::Bearer {
+                    token: token.clone(),
+                });
+            }
+        }
+        None
+    }
+}
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index f7fa6cb31..0e99ec549 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@ -765,4 +765,11 @@ impl EnvVars {
 
     /// Disable GitHub-specific requests that allow uv to skip `git fetch` in some circumstances.
     pub const UV_NO_GITHUB_FAST_PATH: &'static str = "UV_NO_GITHUB_FAST_PATH";
+
+    /// Authentication token for Hugging Face requests. When set, uv will use this token
+    /// when making requests to `https://huggingface.co/` and any subdomains.
+    pub const HF_TOKEN: &'static str = "HF_TOKEN";
+
+    /// Disable Hugging Face authentication, even if `HF_TOKEN` is set.
+    pub const UV_NO_HF_TOKEN: &'static str = "UV_NO_HF_TOKEN";
 }
diff --git a/docs/concepts/authentication.md b/docs/concepts/authentication.md
index 10bf57c21..fe5314b85 100644
--- a/docs/concepts/authentication.md
+++ b/docs/concepts/authentication.md
@@ -151,3 +151,18 @@ insecure.
 
 Use `allow-insecure-host` with caution and only in trusted environments, as it can expose you to
 security risks due to the lack of certificate verification.
+
+## Hugging Face support
+
+uv supports automatic authentication for the Hugging Face Hub. Specifically, if the `HF_TOKEN`
+environment variable is set, uv will propagate it to requests to `huggingface.co`.
+
+This is particularly useful for accessing private scripts in Hugging Face Datasets. For example, you
+can run the following command to execute the `main.py` script from a private dataset:
+
+```console
+$ HF_TOKEN=hf_... uv run https://huggingface.co/datasets/<user>/<dataset>/resolve/<branch>/main.py
+```
+
+You can disable automatic Hugging Face authentication by setting the `UV_NO_HF_TOKEN=1` environment
+variable.
diff --git a/docs/reference/environment.md b/docs/reference/environment.md index e848d4a41..a4d686192 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -252,6 +252,10 @@ Ignore `.env` files when executing `uv run` commands. Disable GitHub-specific requests that allow uv to skip `git fetch` in some circumstances. +### `UV_NO_HF_TOKEN` + +Disable Hugging Face authentication, even if `HF_TOKEN` is set. + ### `UV_NO_INSTALLER_METADATA` Skip writing `uv` installer metadata files (e.g., `INSTALLER`, `REQUESTED`, and `direct_url.json`) to site-packages `.dist-info` directories. @@ -528,6 +532,11 @@ See [force-color.org](https://force-color.org). Used for trusted publishing via `uv publish`. +### `HF_TOKEN` + +Authentication token for Hugging Face requests. When set, uv will use this token +when making requests to `https://huggingface.co/` and any subdomains. + ### `HOME` The standard `HOME` env var. From 036c9bef3f3c29c785e4e1c96edbed9f745e23c8 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 17:07:35 -0400 Subject: [PATCH 127/130] Add a borrowed `Realm` type (#14798) ## Summary Allows zero-cost comparisons against URL references. --- crates/uv-auth/src/providers.rs | 4 +- crates/uv-auth/src/realm.rs | 74 ++++++++++++++++++++++++++++++++- 2 files changed, 74 insertions(+), 4 deletions(-) diff --git a/crates/uv-auth/src/providers.rs b/crates/uv-auth/src/providers.rs index 85a5a0ec7..2c531d3da 100644 --- a/crates/uv-auth/src/providers.rs +++ b/crates/uv-auth/src/providers.rs @@ -5,7 +5,7 @@ use url::Url; use uv_static::EnvVars; use crate::Credentials; -use crate::realm::Realm; +use crate::realm::{Realm, RealmRef}; /// The [`Realm`] for the Hugging Face platform. static HUGGING_FACE_REALM: LazyLock = LazyLock::new(|| { @@ -37,7 +37,7 @@ pub(crate) struct HuggingFaceProvider; impl HuggingFaceProvider { /// Returns the credentials for the Hugging Face platform, if available. 
pub(crate) fn credentials_for(url: &Url) -> Option { - if Realm::from(url) == *HUGGING_FACE_REALM { + if RealmRef::from(url) == *HUGGING_FACE_REALM { if let Some(token) = HUGGING_FACE_TOKEN.as_ref() { return Some(Credentials::Bearer { token: token.clone(), diff --git a/crates/uv-auth/src/realm.rs b/crates/uv-auth/src/realm.rs index cfedf299c..03b3c8fcf 100644 --- a/crates/uv-auth/src/realm.rs +++ b/crates/uv-auth/src/realm.rs @@ -1,5 +1,5 @@ +use std::hash::{Hash, Hasher}; use std::{fmt::Display, fmt::Formatter}; - use url::Url; use uv_small_str::SmallString; @@ -22,7 +22,7 @@ use uv_small_str::SmallString; // The port is only allowed to differ if it matches the "default port" for the scheme. // However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port // so we do not need any special handling here. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub(crate) struct Realm { scheme: SmallString, host: Option, @@ -59,6 +59,76 @@ impl Display for Realm { } } +impl PartialEq for Realm { + fn eq(&self, other: &Self) -> bool { + RealmRef::from(self) == RealmRef::from(other) + } +} + +impl Eq for Realm {} + +impl Hash for Realm { + fn hash(&self, state: &mut H) { + RealmRef::from(self).hash(state); + } +} + +/// A reference to a [`Realm`] that can be used for zero-allocation comparisons. 
+#[derive(Debug, Copy, Clone)] +pub(crate) struct RealmRef<'a> { + scheme: &'a str, + host: Option<&'a str>, + port: Option, +} + +impl<'a> From<&'a Url> for RealmRef<'a> { + fn from(url: &'a Url) -> Self { + Self { + scheme: url.scheme(), + host: url.host_str(), + port: url.port(), + } + } +} + +impl PartialEq for RealmRef<'_> { + fn eq(&self, other: &Self) -> bool { + self.scheme == other.scheme && self.host == other.host && self.port == other.port + } +} + +impl Eq for RealmRef<'_> {} + +impl Hash for RealmRef<'_> { + fn hash(&self, state: &mut H) { + self.scheme.hash(state); + self.host.hash(state); + self.port.hash(state); + } +} + +impl<'a> PartialEq> for Realm { + fn eq(&self, rhs: &RealmRef<'a>) -> bool { + RealmRef::from(self) == *rhs + } +} + +impl PartialEq for RealmRef<'_> { + fn eq(&self, rhs: &Realm) -> bool { + *self == RealmRef::from(rhs) + } +} + +impl<'a> From<&'a Realm> for RealmRef<'a> { + fn from(realm: &'a Realm) -> Self { + Self { + scheme: &realm.scheme, + host: realm.host.as_deref(), + port: realm.port, + } + } +} + #[cfg(test)] mod tests { use url::{ParseError, Url}; From ecfa38608864f99cdb887edc97632fc3ed352fc3 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 21 Jul 2025 18:15:03 -0400 Subject: [PATCH 128/130] Error on unknown fields in `dependency-metadata` (#14801) ## Summary Closes https://github.com/astral-sh/uv/issues/14800. 
--- .../src/dependency_metadata.rs | 24 ++++++------- crates/uv/tests/it/lock.rs | 36 +++++++++++++++++++ crates/uv/tests/it/sync.rs | 6 ++-- uv.schema.json | 1 + 4 files changed, 52 insertions(+), 15 deletions(-) diff --git a/crates/uv-distribution-types/src/dependency_metadata.rs b/crates/uv-distribution-types/src/dependency_metadata.rs index ccda34795..1e978db3d 100644 --- a/crates/uv-distribution-types/src/dependency_metadata.rs +++ b/crates/uv-distribution-types/src/dependency_metadata.rs @@ -30,21 +30,20 @@ impl DependencyMetadata { if let Some(version) = version { // If a specific version was requested, search for an exact match, then a global match. - let metadata = versions + let metadata = if let Some(metadata) = versions .iter() - .find(|v| v.version.as_ref() == Some(version)) - .inspect(|_| { - debug!("Found dependency metadata entry for `{package}=={version}`"); - }) - .or_else(|| versions.iter().find(|v| v.version.is_none())) - .inspect(|_| { - debug!("Found global metadata entry for `{package}`"); - }); - let Some(metadata) = metadata else { + .find(|entry| entry.version.as_ref() == Some(version)) + { + debug!("Found dependency metadata entry for `{package}=={version}`"); + metadata + } else if let Some(metadata) = versions.iter().find(|entry| entry.version.is_none()) { + debug!("Found global metadata entry for `{package}`"); + metadata + } else { warn!("No dependency metadata entry found for `{package}=={version}`"); return None; }; - debug!("Found dependency metadata entry for `{package}=={version}`"); + Some(ResolutionMetadata { name: metadata.name.clone(), version: version.clone(), @@ -65,6 +64,7 @@ impl DependencyMetadata { return None; }; debug!("Found dependency metadata entry for `{package}` (assuming: `{version}`)"); + Some(ResolutionMetadata { name: metadata.name.clone(), version, @@ -86,7 +86,7 @@ impl DependencyMetadata { /// . 
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct StaticMetadata { // Mandatory fields pub name: PackageName, diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 0962ff6d2..f4ccb7bf7 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -18597,6 +18597,42 @@ fn lock_dependency_metadata() -> Result<()> { Removed sniffio v1.3.1 "###); + // Update the static metadata. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio==3.7.0"] + + [[tool.uv.dependency-metadata]] + name = "anyio" + version = "3.7.0" + requires_dist = ["typing-extensions"] + "#, + )?; + + // The operation should warn. + uv_snapshot!(context.filters(), context.lock(), @r#" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: Failed to parse `pyproject.toml` during settings discovery: + TOML parse error at line 11, column 9 + | + 11 | requires_dist = ["typing-extensions"] + | ^^^^^^^^^^^^^ + unknown field `requires_dist`, expected one of `name`, `version`, `requires-dist`, `requires-python`, `provides-extras` + + Resolved 4 packages in [TIME] + Added idna v3.6 + Removed iniconfig v2.0.0 + Added sniffio v1.3.1 + "#); + Ok(()) } diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 1639ecaae..4f2853e61 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -7560,7 +7560,7 @@ fn sync_derivation_chain() -> Result<()> { [[tool.uv.dependency-metadata]] name = "wsgiref" version = "0.1.2" - dependencies = [] + requires-dist = [] "#, )?; @@ -7623,7 +7623,7 @@ fn sync_derivation_chain_extra() -> Result<()> { [[tool.uv.dependency-metadata]] name = "wsgiref" version = "0.1.2" - dependencies = [] + requires-dist = [] 
"#, )?; @@ -7688,7 +7688,7 @@ fn sync_derivation_chain_group() -> Result<()> { [[tool.uv.dependency-metadata]] name = "wsgiref" version = "0.1.2" - dependencies = [] + requires-dist = [] "#, )?; diff --git a/uv.schema.json b/uv.schema.json index 22b30cd06..d8346aab1 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -2104,6 +2104,7 @@ ] } }, + "additionalProperties": false, "required": [ "name" ] From c1bf934721dd327d4cb1cb3b915cfff475e54bc5 Mon Sep 17 00:00:00 2001 From: Ping Shuijie Date: Tue, 22 Jul 2025 18:13:05 +0800 Subject: [PATCH 129/130] chore: fix some minor issues in comments (#14807) ## Summary fix some minor issues in comments ## Test Plan Signed-off-by: pingshuijie --- crates/uv/src/commands/project/environment.rs | 2 +- crates/uv/tests/it/pip_compile.rs | 4 ++-- crates/uv/tests/it/version.rs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index 4f9d936c5..b5bb3fd23 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -63,7 +63,7 @@ impl EphemeralEnvironment { /// environment's `site-packages` directory to Python's import search paths in addition to /// the ephemeral environment's `site-packages` directory. This works well at runtime, but /// is too dynamic for static analysis tools like ty to understand. As such, we - /// additionally write the `sys.prefix` of the parent environment to to the + /// additionally write the `sys.prefix` of the parent environment to the /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it /// easier for these tools to statically and reliably understand the relationship between /// the two environments. 
diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index 69da12fd6..3a4dc28c4 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -12186,7 +12186,7 @@ requires-python = ">3.8" fn prerelease_path_requirement() -> Result<()> { let context = TestContext::new("3.12"); - // Create an a package that requires a pre-release version of `flask`. + // Create a package that requires a pre-release version of `flask`. let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str( r#"[project] @@ -12240,7 +12240,7 @@ requires-python = ">3.8" fn prerelease_editable_requirement() -> Result<()> { let context = TestContext::new("3.12"); - // Create an a package that requires a pre-release version of `flask`.r + // Create a package that requires a pre-release version of `flask`.r let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str( r#"[project] diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs index e5f6e1687..53cb0de06 100644 --- a/crates/uv/tests/it/version.rs +++ b/crates/uv/tests/it/version.rs @@ -1993,7 +1993,7 @@ fn version_set_workspace() -> Result<()> { ); }); - // Set the other child's version, refereshing the lock and sync + // Set the other child's version, refreshing the lock and sync let mut version_cmd = context.version(); version_cmd .arg("--package") From 8bffa693b41e4a8c8d47e7ee9a95c8e64103305d Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Tue, 22 Jul 2025 07:11:05 -0500 Subject: [PATCH 130/130] Copy entry points and Jupyter data directories into ephemeral environments (#14790) This is an alternative to https://github.com/astral-sh/uv/pull/14788 which has the benefit that it addresses https://github.com/astral-sh/uv/issues/13327 which would be an issue even if we reverted #14447. There are two changes here 1. 
We copy entry points into the ephemeral environment, and rewrite their shebangs (or trampoline target) to ensure the ephemeral environment is not bypassed. 2. We link `etc/jupyter` and `share/jupyter` data directories into the ephemeral environment, this is in order to ensure the above doesn't break Jupyter which unfortunately cannot find the `share` directory otherwise. I'd love not to do this, as it seems brittle and we don't have a motivating use-case beyond Jupyter. I've opened https://github.com/jupyterlab/jupyterlab/issues/17716 upstream for discussion, as there is a viable patch that could be made upstream to resolve the problem. I've limited the fix to Jupyter directories so we can remove it without breakage. Closes https://github.com/astral-sh/uv/issues/14729 Closes https://github.com/astral-sh/uv/issues/13327 Closes https://github.com/astral-sh/uv/issues/14749 --------- Co-authored-by: Charlie Marsh --- crates/uv-fs/src/lib.rs | 34 ++++ crates/uv-trampoline-builder/src/lib.rs | 61 ++++++ crates/uv/src/commands/project/environment.rs | 14 ++ crates/uv/src/commands/project/run.rs | 155 +++++++++++++++- crates/uv/tests/it/run.rs | 175 ++++++++++++++++++ 5 files changed, 437 insertions(+), 2 deletions(-) diff --git a/crates/uv-fs/src/lib.rs b/crates/uv-fs/src/lib.rs index dcc0f00b2..17f52dcf5 100644 --- a/crates/uv-fs/src/lib.rs +++ b/crates/uv-fs/src/lib.rs @@ -84,6 +84,8 @@ pub async fn read_to_string_transcode(path: impl AsRef) -> std::io::Result /// junction at the same path. /// /// Note that because junctions are used, the source must be a directory. +/// +/// Changes to this function should be reflected in [`create_symlink`]. #[cfg(windows)] pub fn replace_symlink(src: impl AsRef, dst: impl AsRef) -> std::io::Result<()> { // If the source is a file, we can't create a junction @@ -138,6 +140,38 @@ pub fn replace_symlink(src: impl AsRef, dst: impl AsRef) -> std::io: } } +/// Create a symlink at `dst` pointing to `src`. 
+/// +/// On Windows, this uses the `junction` crate to create a junction point. +/// +/// Note that because junctions are used, the source must be a directory. +/// +/// Changes to this function should be reflected in [`replace_symlink`]. +#[cfg(windows)] +pub fn create_symlink(src: impl AsRef, dst: impl AsRef) -> std::io::Result<()> { + // If the source is a file, we can't create a junction + if src.as_ref().is_file() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + format!( + "Cannot create a junction for {}: is not a directory", + src.as_ref().display() + ), + )); + } + + junction::create( + dunce::simplified(src.as_ref()), + dunce::simplified(dst.as_ref()), + ) +} + +/// Create a symlink at `dst` pointing to `src`. +#[cfg(unix)] +pub fn create_symlink(src: impl AsRef, dst: impl AsRef) -> std::io::Result<()> { + fs_err::os::unix::fs::symlink(src.as_ref(), dst.as_ref()) +} + #[cfg(unix)] pub fn remove_symlink(path: impl AsRef) -> std::io::Result<()> { fs_err::remove_file(path.as_ref()) diff --git a/crates/uv-trampoline-builder/src/lib.rs b/crates/uv-trampoline-builder/src/lib.rs index 2e1cde872..1a25b9454 100644 --- a/crates/uv-trampoline-builder/src/lib.rs +++ b/crates/uv-trampoline-builder/src/lib.rs @@ -41,6 +41,7 @@ const MAGIC_NUMBER_SIZE: usize = 4; pub struct Launcher { pub kind: LauncherKind, pub python_path: PathBuf, + payload: Vec, } impl Launcher { @@ -109,11 +110,69 @@ impl Launcher { String::from_utf8(buffer).map_err(|err| Error::InvalidPath(err.utf8_error()))?, ); + #[allow(clippy::cast_possible_truncation)] + let file_size = { + let raw_length = file + .seek(io::SeekFrom::End(0)) + .map_err(|e| Error::InvalidLauncherSeek("size probe".into(), 0, e))?; + + if raw_length > usize::MAX as u64 { + return Err(Error::InvalidDataLength(raw_length)); + } + + // SAFETY: Above we guarantee the length is less than uszie + raw_length as usize + }; + + // Read the payload + file.seek(io::SeekFrom::Start(0)) + .map_err(|e| 
Error::InvalidLauncherSeek("rewind".into(), 0, e))?; + let payload_len = + file_size.saturating_sub(MAGIC_NUMBER_SIZE + PATH_LENGTH_SIZE + path_length); + let mut buffer = vec![0u8; payload_len]; + file.read_exact(&mut buffer) + .map_err(|err| Error::InvalidLauncherRead("payload".into(), err))?; + Ok(Some(Self { kind, + payload: buffer, python_path: path, })) } + + pub fn write_to_file(self, file: &mut File) -> Result<(), Error> { + let python_path = self.python_path.simplified_display().to_string(); + + if python_path.len() > MAX_PATH_LENGTH as usize { + return Err(Error::InvalidPathLength( + u32::try_from(python_path.len()).expect("path length already checked"), + )); + } + + let mut launcher: Vec = Vec::with_capacity( + self.payload.len() + python_path.len() + PATH_LENGTH_SIZE + MAGIC_NUMBER_SIZE, + ); + launcher.extend_from_slice(&self.payload); + launcher.extend_from_slice(python_path.as_bytes()); + launcher.extend_from_slice( + &u32::try_from(python_path.len()) + .expect("file path should be smaller than 4GB") + .to_le_bytes(), + ); + launcher.extend_from_slice(self.kind.magic_number()); + + file.write_all(&launcher)?; + Ok(()) + } + + #[must_use] + pub fn with_python_path(self, path: PathBuf) -> Self { + Self { + kind: self.kind, + payload: self.payload, + python_path: path, + } + } } /// The kind of trampoline launcher to create. 
@@ -177,6 +236,8 @@ pub enum Error { Io(#[from] io::Error), #[error("Only paths with a length up to 32KB are supported but found a length of {0} bytes")] InvalidPathLength(u32), + #[error("Only data with a length up to usize is supported but found a length of {0} bytes")] + InvalidDataLength(u64), #[error("Failed to parse executable path")] InvalidPath(#[source] Utf8Error), #[error("Failed to seek to {0} at offset {1}")] diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index b5bb3fd23..af3b3b351 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -78,6 +78,20 @@ impl EphemeralEnvironment { )?; Ok(()) } + + /// Returns the path to the environment's scripts directory. + pub(crate) fn scripts(&self) -> &Path { + self.0.scripts() + } + + /// Returns the path to the environment's Python executable. + pub(crate) fn sys_executable(&self) -> &Path { + self.0.interpreter().sys_executable() + } + + pub(crate) fn sys_prefix(&self) -> &Path { + self.0.interpreter().sys_prefix() + } } /// A [`PythonEnvironment`] stored in the cache. 
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index ba8935013..44d0dc474 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -9,8 +9,9 @@ use anyhow::{Context, anyhow, bail}; use futures::StreamExt; use itertools::Itertools; use owo_colors::OwoColorize; +use thiserror::Error; use tokio::process::Command; -use tracing::{debug, warn}; +use tracing::{debug, trace, warn}; use url::Url; use uv_cache::Cache; @@ -22,7 +23,7 @@ use uv_configuration::{ }; use uv_distribution_types::Requirement; use uv_fs::which::is_executable; -use uv_fs::{PythonExt, Simplified}; +use uv_fs::{PythonExt, Simplified, create_symlink}; use uv_installer::{SatisfiesResult, SitePackages}; use uv_normalize::{DefaultExtras, DefaultGroups, PackageName}; use uv_python::{ @@ -1071,6 +1072,67 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl requirements_site_packages.escape_for_python(), ))?; + // N.B. The order here matters — earlier interpreters take precedence over the + // later ones. + for interpreter in [requirements_env.interpreter(), &base_interpreter] { + // Copy each entrypoint from the base environments to the ephemeral environment, + // updating the Python executable target to ensure they run in the ephemeral + // environment. + for entry in fs_err::read_dir(interpreter.scripts())? { + let entry = entry?; + if !entry.file_type()?.is_file() { + continue; + } + match copy_entrypoint( + &entry.path(), + &ephemeral_env.scripts().join(entry.file_name()), + interpreter.sys_executable(), + ephemeral_env.sys_executable(), + ) { + Ok(()) => {} + // If the entrypoint already exists, skip it. 
+ Err(CopyEntrypointError::Io(err)) + if err.kind() == std::io::ErrorKind::AlreadyExists => + { + trace!( + "Skipping copy of entrypoint `{}`: already exists", + &entry.path().display() + ); + } + Err(err) => return Err(err.into()), + } + } + + // Link data directories from the base environment to the ephemeral environment. + // + // This is critical for Jupyter Lab, which cannot operate without the files it + // writes to `/share/jupyter`. + // + // See https://github.com/jupyterlab/jupyterlab/issues/17716 + for dir in &["etc/jupyter", "share/jupyter"] { + let source = interpreter.sys_prefix().join(dir); + if !matches!(source.try_exists(), Ok(true)) { + continue; + } + if !source.is_dir() { + continue; + } + let target = ephemeral_env.sys_prefix().join(dir); + if let Some(parent) = target.parent() { + fs_err::create_dir_all(parent)?; + } + match create_symlink(&source, &target) { + Ok(()) => trace!( + "Created link for {} -> {}", + target.user_display(), + source.user_display() + ), + Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {} + Err(err) => return Err(err.into()), + } + } + } + // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg` // file. This helps out static-analysis tools such as ty (see docs on // `CachedEnvironment::set_parent_environment`). @@ -1669,3 +1731,92 @@ fn read_recursion_depth_from_environment_variable() -> anyhow::Result { .parse::() .with_context(|| format!("invalid value for {}", EnvVars::UV_RUN_RECURSION_DEPTH)) } + +#[derive(Error, Debug)] +enum CopyEntrypointError { + #[error(transparent)] + Io(#[from] std::io::Error), + #[cfg(windows)] + #[error(transparent)] + Trampoline(#[from] uv_trampoline_builder::Error), +} + +/// Create a copy of the entrypoint at `source` at `target`, if it has a Python shebang, replacing +/// the previous Python executable with a new one. +/// +/// This is a no-op if the target already exists. 
+/// +/// Note on Windows, the entrypoints do not use shebangs and require a rewrite of the trampoline. +#[cfg(unix)] +fn copy_entrypoint( + source: &Path, + target: &Path, + previous_executable: &Path, + python_executable: &Path, +) -> Result<(), CopyEntrypointError> { + use std::io::Write; + use std::os::unix::fs::PermissionsExt; + + use fs_err::os::unix::fs::OpenOptionsExt; + + let contents = fs_err::read_to_string(source)?; + + let Some(contents) = contents + // Check for a relative path or relocatable shebang + .strip_prefix( + r#"#!/bin/sh +'''exec' "$(dirname -- "$(realpath -- "$0")")"/'python' "$0" "$@" +' ''' +"#, + ) + // Or an absolute path shebang + .or_else(|| contents.strip_prefix(&format!("#!{}\n", previous_executable.display()))) + else { + // If it's not a Python shebang, we'll skip it + trace!( + "Skipping copy of entrypoint `{}`: does not start with expected shebang", + source.user_display() + ); + return Ok(()); + }; + + let contents = format!("#!{}\n{}", python_executable.display(), contents); + let mode = fs_err::metadata(source)?.permissions().mode(); + let mut file = fs_err::OpenOptions::new() + .create_new(true) + .write(true) + .mode(mode) + .open(target)?; + file.write_all(contents.as_bytes())?; + + trace!("Updated entrypoint at {}", target.user_display()); + + Ok(()) +} + +/// Create a copy of the entrypoint at `source` at `target`, if it's a Python script launcher, +/// replacing the target Python executable with a new one. +#[cfg(windows)] +fn copy_entrypoint( + source: &Path, + target: &Path, + _previous_executable: &Path, + python_executable: &Path, +) -> Result<(), CopyEntrypointError> { + use uv_trampoline_builder::Launcher; + + let Some(launcher) = Launcher::try_from_path(source)? 
else { + return Ok(()); + }; + + let launcher = launcher.with_python_path(python_executable.to_path_buf()); + let mut file = fs_err::OpenOptions::new() + .create_new(true) + .write(true) + .open(target)?; + launcher.write_to_file(&mut file)?; + + trace!("Updated entrypoint at {}", target.user_display()); + + Ok(()) +} diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index ad8672788..2e9762a60 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -1319,6 +1319,181 @@ fn run_with_pyvenv_cfg_file() -> Result<()> { Ok(()) } +#[test] +fn run_with_overlay_interpreter() -> Result<()> { + let context = TestContext::new("3.12").with_filtered_exe_suffix(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.8" + dependencies = ["anyio"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + + [project.scripts] + main = "foo:main" + "# + })?; + + let foo = context.temp_dir.child("src").child("foo"); + foo.create_dir_all()?; + let init_py = foo.child("__init__.py"); + init_py.write_str(indoc! { r#" + import sys + import shutil + from pathlib import Path + + def show_python(): + print(sys.executable) + + def copy_entrypoint(): + base = Path(sys.executable) + shutil.copyfile(base.with_name("main").with_suffix(base.suffix), sys.argv[1]) + + def main(): + show_python() + if len(sys.argv) > 1: + copy_entrypoint() + "# + })?; + + // The project's entrypoint should be rewritten to use the overlay interpreter. 
+ uv_snapshot!(context.filters(), context.run().arg("--with").arg("iniconfig").arg("main").arg(context.temp_dir.child("main").as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + [CACHE_DIR]/builds-v0/[TMP]/python + + ----- stderr ----- + Resolved 6 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 4 packages in [TIME] + + anyio==4.3.0 + + foo==1.0.0 (from file://[TEMP_DIR]/) + + idna==3.6 + + sniffio==1.3.1 + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "); + + #[cfg(unix)] + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + context.read("main"), @r##" + #![CACHE_DIR]/builds-v0/[TMP]/python + # -*- coding: utf-8 -*- + import sys + from foo import main + if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) + "## + ); + } + ); + + // The package, its dependencies, and the overlay dependencies should be available. + context + .run() + .arg("--with") + .arg("iniconfig") + .arg("python") + .arg("-c") + .arg("import foo; import anyio; import iniconfig") + .assert() + .success(); + + // When layering the project on top (via `--with`), the overlay interpreter also should be used. + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--with").arg(".").arg("main"), @r" + success: true + exit_code: 0 + ----- stdout ----- + [CACHE_DIR]/builds-v0/[TMP]/python + + ----- stderr ----- + Resolved 4 packages in [TIME] + Prepared 1 package in [TIME] + Installed 4 packages in [TIME] + + anyio==4.3.0 + + foo==1.0.0 (from file://[TEMP_DIR]/) + + idna==3.6 + + sniffio==1.3.1 + "); + + // Switch to a relocatable virtual environment. + context.venv().arg("--relocatable").assert().success(); + + // The project's entrypoint should be rewritten to use the overlay interpreter. 
+ uv_snapshot!(context.filters(), context.run().arg("--with").arg("iniconfig").arg("main").arg(context.temp_dir.child("main").as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + [CACHE_DIR]/builds-v0/[TMP]/python + + ----- stderr ----- + Resolved 6 packages in [TIME] + Audited 4 packages in [TIME] + Resolved 1 package in [TIME] + "); + + // The package, its dependencies, and the overlay dependencies should be available. + context + .run() + .arg("--with") + .arg("iniconfig") + .arg("python") + .arg("-c") + .arg("import foo; import anyio; import iniconfig") + .assert() + .success(); + + #[cfg(unix)] + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + context.read("main"), @r##" + #![CACHE_DIR]/builds-v0/[TMP]/python + # -*- coding: utf-8 -*- + import sys + from foo import main + if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) + "## + ); + } + ); + + // When layering the project on top (via `--with`), the overlay interpreter also should be used. + uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--with").arg(".").arg("main"), @r" + success: true + exit_code: 0 + ----- stdout ----- + [CACHE_DIR]/builds-v0/[TMP]/python + + ----- stderr ----- + Resolved 4 packages in [TIME] + "); + + Ok(()) +} + #[test] fn run_with_build_constraints() -> Result<()> { let context = TestContext::new("3.9");