Perform a universal resolution, attempting to generate a single `requirements.txt` output file that is compatible with all operating systems, architectures, and Python implementations.
In universal mode, the current Python version (or user-provided `--python-version`) will be treated as a lower bound. For example, `--universal --python-version 3.7` would produce a universal resolution for Python 3.7 and later.
Implies `--no-strip-markers`.
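For instance, a universal lockfile targeting Python 3.7 and later could be generated along these lines (a sketch; the `requirements.in` input filename is an assumption):

    uv pip compile --universal --python-version 3.7 requirements.in -o requirements.txt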
@@ -3590,6 +3606,22 @@ be used with caution, as it can modify the system Python installation.
diff --git a/uv.schema.json b/uv.schema.json
index 33c1ff1f5..0d2b47490 100644
--- a/uv.schema.json
+++ b/uv.schema.json
@@ -2589,6 +2589,118 @@
"enum": [
"cu80"
]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 6.3.",
+ "type": "string",
+ "enum": [
+ "rocm6.3"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 6.2.4.",
+ "type": "string",
+ "enum": [
+ "rocm6.2.4"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 6.2.",
+ "type": "string",
+ "enum": [
+ "rocm6.2"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 6.1.",
+ "type": "string",
+ "enum": [
+ "rocm6.1"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 6.0.",
+ "type": "string",
+ "enum": [
+ "rocm6.0"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.7.",
+ "type": "string",
+ "enum": [
+ "rocm5.7"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.6.",
+ "type": "string",
+ "enum": [
+ "rocm5.6"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.5.",
+ "type": "string",
+ "enum": [
+ "rocm5.5"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.4.2.",
+ "type": "string",
+ "enum": [
+ "rocm5.4.2"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.4.",
+ "type": "string",
+ "enum": [
+ "rocm5.4"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.3.",
+ "type": "string",
+ "enum": [
+ "rocm5.3"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.2.",
+ "type": "string",
+ "enum": [
+ "rocm5.2"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 5.1.1.",
+ "type": "string",
+ "enum": [
+ "rocm5.1.1"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 4.2.",
+ "type": "string",
+ "enum": [
+ "rocm4.2"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 4.1.",
+ "type": "string",
+ "enum": [
+ "rocm4.1"
+ ]
+ },
+ {
+ "description": "Use the PyTorch index for ROCm 4.0.1.",
+ "type": "string",
+ "enum": [
+ "rocm4.0.1"
+ ]
}
]
},
From ee0ba65eb22c6bba59ea216ff07aeb170f8d3f2a Mon Sep 17 00:00:00 2001
From: konsti
Date: Wed, 18 Jun 2025 15:06:09 +0200
Subject: [PATCH 016/349] Unify test venv `python` command creation (#14117)
Refactoring in preparation for
https://github.com/astral-sh/uv/pull/14080
---
crates/uv/tests/it/build.rs | 3 +-
crates/uv/tests/it/build_backend.rs | 49 +-
crates/uv/tests/it/common/mod.rs | 46 +-
crates/uv/tests/it/pip_install.rs | 5 +-
crates/uv/tests/it/pip_install_scenarios.rs | 764 ++++---------------
crates/uv/tests/it/pip_sync.rs | 42 +-
crates/uv/tests/it/pip_uninstall.rs | 59 +-
scripts/scenarios/templates/compile.mustache | 4 +-
scripts/scenarios/templates/install.mustache | 49 +-
scripts/scenarios/templates/lock.mustache | 2 +-
10 files changed, 212 insertions(+), 811 deletions(-)
diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs
index 4fe7ca9cb..706c1a681 100644
--- a/crates/uv/tests/it/build.rs
+++ b/crates/uv/tests/it/build.rs
@@ -7,7 +7,6 @@ use indoc::indoc;
use insta::assert_snapshot;
use predicates::prelude::predicate;
use std::env::current_dir;
-use std::process::Command;
use zip::ZipArchive;
#[test]
@@ -1857,7 +1856,7 @@ fn build_unconfigured_setuptools() -> Result<()> {
+ greet==0.1.0 (from file://[TEMP_DIR]/)
"###);
- uv_snapshot!(context.filters(), Command::new(context.interpreter()).arg("-c").arg("import greet"), @r###"
+ uv_snapshot!(context.filters(), context.python_command().arg("-c").arg("import greet"), @r###"
success: true
exit_code: 0
----- stdout -----
diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs
index a806dc989..c2d99ba3e 100644
--- a/crates/uv/tests/it/build_backend.rs
+++ b/crates/uv/tests/it/build_backend.rs
@@ -50,13 +50,9 @@ fn built_by_uv_direct_wheel() -> Result<()> {
.assert()
.success();
- uv_snapshot!(context
- .run()
- .arg("python")
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg(BUILT_BY_UV_TEST_SCRIPT)
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r###"
+ .arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -138,13 +134,9 @@ fn built_by_uv_direct() -> Result<()> {
drop(wheel_dir);
- uv_snapshot!(context
- .run()
- .arg("python")
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg(BUILT_BY_UV_TEST_SCRIPT)
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r###"
+ .arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -169,7 +161,8 @@ fn built_by_uv_editable() -> Result<()> {
// Without the editable, pytest fails.
context.pip_install().arg("pytest").assert().success();
- Command::new(context.interpreter())
+ context
+ .python_command()
.arg("-m")
.arg("pytest")
.current_dir(built_by_uv)
@@ -200,7 +193,7 @@ fn built_by_uv_editable() -> Result<()> {
drop(wheel_dir);
// Now, pytest passes.
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-m")
.arg("pytest")
// Avoid showing absolute paths and column dependent layout
@@ -340,11 +333,9 @@ fn rename_module() -> Result<()> {
.success();
// Importing the module with the `module-name` name succeeds.
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg("import bar")
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r###"
+ .arg("import bar"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -354,11 +345,9 @@ fn rename_module() -> Result<()> {
"###);
// Importing the package name fails, it was overridden by `module-name`.
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg("import foo")
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r###"
+ .arg("import foo"), @r###"
success: false
exit_code: 1
----- stdout -----
@@ -419,11 +408,9 @@ fn rename_module_editable_build() -> Result<()> {
.success();
// Importing the module with the `module-name` name succeeds.
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg("import bar")
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r###"
+ .arg("import bar"), @r###"
success: true
exit_code: 0
----- stdout -----
@@ -514,11 +501,9 @@ fn build_module_name_normalization() -> Result<()> {
.assert()
.success();
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
- .arg("import Django_plugin")
- // Python on windows
- .env(EnvVars::PYTHONUTF8, "1"), @r"
+ .arg("import Django_plugin"), @r"
success: true
exit_code: 0
----- stdout -----
@@ -728,7 +713,7 @@ fn complex_namespace_packages() -> Result<()> {
"
);
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
.arg("from complex_project.part_b import two; print(two())"),
@r"
@@ -769,7 +754,7 @@ fn complex_namespace_packages() -> Result<()> {
"
);
- uv_snapshot!(Command::new(context.interpreter())
+ uv_snapshot!(context.python_command()
.arg("-c")
.arg("from complex_project.part_b import two; print(two())"),
@r"
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index 66eb21729..f997561a9 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -1085,15 +1085,30 @@ impl TestContext {
}
pub fn interpreter(&self) -> PathBuf {
- venv_to_interpreter(&self.venv)
+ let venv = &self.venv;
+ if cfg!(unix) {
+ venv.join("bin").join("python")
+ } else if cfg!(windows) {
+ venv.join("Scripts").join("python.exe")
+ } else {
+ unimplemented!("Only Windows and Unix are supported")
+ }
+ }
+
+ pub fn python_command(&self) -> Command {
+ let mut command = self.new_command_with(&self.interpreter());
+ command
+ // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
+ // https://github.com/python/cpython/issues/75953
+ .arg("-B")
+ // Python on windows
+ .env(EnvVars::PYTHONUTF8, "1");
+ command
}
/// Run the given python code and check whether it succeeds.
pub fn assert_command(&self, command: &str) -> Assert {
- self.new_command_with(&venv_to_interpreter(&self.venv))
- // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
- // https://github.com/python/cpython/issues/75953
- .arg("-B")
+ self.python_command()
.arg("-c")
.arg(command)
.current_dir(&self.temp_dir)
@@ -1102,10 +1117,7 @@ impl TestContext {
/// Run the given python file and check whether it succeeds.
    pub fn assert_file(&self, file: impl AsRef<Path>) -> Assert {
- self.new_command_with(&venv_to_interpreter(&self.venv))
- // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
- // https://github.com/python/cpython/issues/75953
- .arg("-B")
+ self.python_command()
.arg(file.as_ref())
.current_dir(&self.temp_dir)
.assert()
@@ -1120,6 +1132,12 @@ impl TestContext {
.stdout(version);
}
+ /// Assert a package is not installed.
+ pub fn assert_not_installed(&self, package: &'static str) {
+ self.assert_command(format!("import {package}").as_str())
+ .failure();
+ }
+
/// Generate various escaped regex patterns for the given path.
    pub fn path_patterns(path: impl AsRef<Path>) -> Vec<String> {
let mut patterns = Vec::new();
@@ -1347,16 +1365,6 @@ pub fn venv_bin_path(venv: impl AsRef<Path>) -> PathBuf {
}
}
-pub fn venv_to_interpreter(venv: &Path) -> PathBuf {
- if cfg!(unix) {
- venv.join("bin").join("python")
- } else if cfg!(windows) {
- venv.join("Scripts").join("python.exe")
- } else {
- unimplemented!("Only Windows and Unix are supported")
- }
-}
-
/// Get the path to the python interpreter for a specific python version.
pub fn get_python(version: &PythonVersion) -> PathBuf {
ManagedPythonInstallations::from_settings(None)
diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs
index 569edf00e..e0876b23c 100644
--- a/crates/uv/tests/it/pip_install.rs
+++ b/crates/uv/tests/it/pip_install.rs
@@ -19,7 +19,7 @@ use wiremock::{
use crate::common::{self, decode_token};
use crate::common::{
DEFAULT_PYTHON_VERSION, TestContext, build_vendor_links_url, download_to_disk, get_bin,
- uv_snapshot, venv_bin_path, venv_to_interpreter,
+ uv_snapshot, venv_bin_path,
};
use uv_fs::Simplified;
use uv_static::EnvVars;
@@ -9083,8 +9083,7 @@ fn build_tag() {
);
// Ensure that we choose the highest build tag (5).
- uv_snapshot!(Command::new(venv_to_interpreter(&context.venv))
- .arg("-B")
+ uv_snapshot!(context.python_command()
.arg("-c")
.arg("import build_tag; build_tag.main()")
.current_dir(&context.temp_dir), @r###"
diff --git a/crates/uv/tests/it/pip_install_scenarios.rs b/crates/uv/tests/it/pip_install_scenarios.rs
index 1a95b1caa..153d5a8fb 100644
--- a/crates/uv/tests/it/pip_install_scenarios.rs
+++ b/crates/uv/tests/it/pip_install_scenarios.rs
@@ -5,52 +5,20 @@
//!
#![cfg(all(feature = "python", feature = "pypi", unix))]
-use std::path::Path;
use std::process::Command;
-use assert_cmd::assert::Assert;
-use assert_cmd::prelude::*;
-
use uv_static::EnvVars;
-use crate::common::{
- TestContext, build_vendor_links_url, get_bin, packse_index_url, uv_snapshot,
- venv_to_interpreter,
-};
-
-fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert {
- Command::new(venv_to_interpreter(venv))
- .arg("-c")
- .arg(command)
- .current_dir(temp_dir)
- .assert()
-}
-
-fn assert_installed(venv: &Path, package: &'static str, version: &'static str, temp_dir: &Path) {
- assert_command(
- venv,
- format!("import {package} as package; print(package.__version__, end='')").as_str(),
- temp_dir,
- )
- .success()
- .stdout(version);
-}
-
-fn assert_not_installed(venv: &Path, package: &'static str, temp_dir: &Path) {
- assert_command(venv, format!("import {package}").as_str(), temp_dir).failure();
-}
+use crate::common::{TestContext, build_vendor_links_url, packse_index_url, uv_snapshot};
/// Create a `pip install` command with options shared across all scenarios.
fn command(context: &TestContext) -> Command {
- let mut command = Command::new(get_bin());
+ let mut command = context.pip_install();
command
- .arg("pip")
- .arg("install")
.arg("--index-url")
.arg(packse_index_url())
.arg("--find-links")
.arg(build_vendor_links_url());
- context.add_shared_options(&mut command, true);
command.env_remove(EnvVars::UV_EXCLUDE_NEWER);
command
}
@@ -88,11 +56,7 @@ fn requires_exact_version_does_not_exist() {
╰─▶ Because there is no version of package-a==2.0.0 and you require package-a==2.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "requires_exact_version_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("requires_exact_version_does_not_exist_a");
}
/// The user requires a version of `a` greater than `1.0.0` but only smaller or equal versions exist
@@ -130,11 +94,7 @@ fn requires_greater_version_does_not_exist() {
╰─▶ Because only package-a<=1.0.0 is available and you require package-a>1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "requires_greater_version_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("requires_greater_version_does_not_exist_a");
}
/// The user requires a version of `a` less than `1.0.0` but only larger versions exist
@@ -174,11 +134,7 @@ fn requires_less_version_does_not_exist() {
╰─▶ Because only package-a>=2.0.0 is available and you require package-a<2.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "requires_less_version_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("requires_less_version_does_not_exist_a");
}
/// The user requires any version of package `a` which does not exist.
@@ -211,11 +167,7 @@ fn requires_package_does_not_exist() {
╰─▶ Because package-a was not found in the package registry and you require package-a, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "requires_package_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("requires_package_does_not_exist_a");
}
/// The user requires package `a` but `a` requires package `b` which does not exist
@@ -254,11 +206,7 @@ fn transitive_requires_package_does_not_exist() {
And because only package-a==1.0.0 is available and you require package-a, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "transitive_requires_package_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_requires_package_does_not_exist_a");
}
/// There is a non-contiguous range of compatible versions for the requested package `a`, but another dependency `c` excludes the range. This is the same as `dependency-excludes-range-of-compatible-versions` but some of the versions of `a` are incompatible for another reason e.g. dependency on non-existent package `d`.
@@ -376,21 +324,12 @@ fn dependency_excludes_non_contiguous_range_of_compatible_versions() {
");
// Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails.
- assert_not_installed(
- &context.venv,
- "dependency_excludes_non_contiguous_range_of_compatible_versions_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "dependency_excludes_non_contiguous_range_of_compatible_versions_b",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "dependency_excludes_non_contiguous_range_of_compatible_versions_c",
- &context.temp_dir,
- );
+ context
+ .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_a");
+ context
+ .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_b");
+ context
+ .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_c");
}
/// There is a range of compatible versions for the requested package `a`, but another dependency `c` excludes that range.
@@ -499,21 +438,9 @@ fn dependency_excludes_range_of_compatible_versions() {
");
// Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails.
- assert_not_installed(
- &context.venv,
- "dependency_excludes_range_of_compatible_versions_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "dependency_excludes_range_of_compatible_versions_b",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "dependency_excludes_range_of_compatible_versions_c",
- &context.temp_dir,
- );
+ context.assert_not_installed("dependency_excludes_range_of_compatible_versions_a");
+ context.assert_not_installed("dependency_excludes_range_of_compatible_versions_b");
+ context.assert_not_installed("dependency_excludes_range_of_compatible_versions_c");
}
/// Only one version of the requested package `a` is compatible, but the user has banned that version.
@@ -586,16 +513,8 @@ fn excluded_only_compatible_version() {
");
// Only `a==1.2.0` is available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`. The user has excluded that version of `a` so resolution fails.
- assert_not_installed(
- &context.venv,
- "excluded_only_compatible_version_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "excluded_only_compatible_version_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("excluded_only_compatible_version_a");
+ context.assert_not_installed("excluded_only_compatible_version_b");
}
/// Only one version of the requested package is available, but the user has banned that version.
@@ -635,7 +554,7 @@ fn excluded_only_version() {
");
// Only `a==1.0.0` is available but the user excluded it.
- assert_not_installed(&context.venv, "excluded_only_version_a", &context.temp_dir);
+ context.assert_not_installed("excluded_only_version_a");
}
/// Multiple optional dependencies are requested for the package via an 'all' extra.
@@ -701,24 +620,9 @@ fn all_extras_required() {
+ package-c==1.0.0
");
- assert_installed(
- &context.venv,
- "all_extras_required_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "all_extras_required_b",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "all_extras_required_c",
- "1.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("all_extras_required_a", "1.0.0");
+ context.assert_installed("all_extras_required_b", "1.0.0");
+ context.assert_installed("all_extras_required_c", "1.0.0");
}
/// Optional dependencies are requested for the package, the extra is only available on an older version.
@@ -771,12 +675,7 @@ fn extra_does_not_exist_backtrack() {
");
// The resolver should not backtrack to `a==1.0.0` because missing extras are allowed during resolution. `b` should not be installed.
- assert_installed(
- &context.venv,
- "extra_does_not_exist_backtrack_a",
- "3.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("extra_does_not_exist_backtrack_a", "3.0.0");
}
/// One of two incompatible optional dependencies are requested for the package.
@@ -829,18 +728,8 @@ fn extra_incompatible_with_extra_not_requested() {
");
// Because the user does not request both extras, it is okay that one is incompatible with the other.
- assert_installed(
- &context.venv,
- "extra_incompatible_with_extra_not_requested_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "extra_incompatible_with_extra_not_requested_b",
- "2.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("extra_incompatible_with_extra_not_requested_a", "1.0.0");
+ context.assert_installed("extra_incompatible_with_extra_not_requested_b", "2.0.0");
}
/// Multiple optional dependencies are requested for the package, but they have conflicting requirements with each other.
@@ -892,11 +781,7 @@ fn extra_incompatible_with_extra() {
");
// Because both `extra_b` and `extra_c` are requested and they require incompatible versions of `b`, `a` cannot be installed.
- assert_not_installed(
- &context.venv,
- "extra_incompatible_with_extra_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("extra_incompatible_with_extra_a");
}
/// Optional dependencies are requested for the package, but the extra is not compatible with other requested versions.
@@ -946,16 +831,8 @@ fn extra_incompatible_with_root() {
");
// Because the user requested `b==2.0.0` but the requested extra requires `b==1.0.0`, the dependencies cannot be satisfied.
- assert_not_installed(
- &context.venv,
- "extra_incompatible_with_root_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "extra_incompatible_with_root_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("extra_incompatible_with_root_a");
+ context.assert_not_installed("extra_incompatible_with_root_b");
}
/// Optional dependencies are requested for the package.
@@ -1001,18 +878,8 @@ fn extra_required() {
+ package-b==1.0.0
");
- assert_installed(
- &context.venv,
- "extra_required_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "extra_required_b",
- "1.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("extra_required_a", "1.0.0");
+ context.assert_installed("extra_required_b", "1.0.0");
}
/// Optional dependencies are requested for the package, but the extra does not exist.
@@ -1052,7 +919,7 @@ fn missing_extra() {
");
// Missing extras are ignored during resolution.
- assert_installed(&context.venv, "missing_extra_a", "1.0.0", &context.temp_dir);
+ context.assert_installed("missing_extra_a", "1.0.0");
}
/// Multiple optional dependencies are requested for the package.
@@ -1106,24 +973,9 @@ fn multiple_extras_required() {
+ package-c==1.0.0
");
- assert_installed(
- &context.venv,
- "multiple_extras_required_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "multiple_extras_required_b",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "multiple_extras_required_c",
- "1.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("multiple_extras_required_a", "1.0.0");
+ context.assert_installed("multiple_extras_required_b", "1.0.0");
+ context.assert_installed("multiple_extras_required_c", "1.0.0");
}
/// The user requires two incompatible, existing versions of package `a`
@@ -1164,16 +1016,8 @@ fn direct_incompatible_versions() {
╰─▶ Because you require package-a==1.0.0 and package-a==2.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "direct_incompatible_versions_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "direct_incompatible_versions_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("direct_incompatible_versions_a");
+ context.assert_not_installed("direct_incompatible_versions_a");
}
/// The user requires `a`, which requires two incompatible, existing versions of package `b`
@@ -1214,11 +1058,7 @@ fn transitive_incompatible_versions() {
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "transitive_incompatible_versions_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_incompatible_versions_a");
}
/// The user requires packages `a` and `b` but `a` requires a different version of `b`
@@ -1265,16 +1105,8 @@ fn transitive_incompatible_with_root_version() {
And because you require package-a and package-b==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "transitive_incompatible_with_root_version_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_incompatible_with_root_version_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_incompatible_with_root_version_a");
+ context.assert_not_installed("transitive_incompatible_with_root_version_b");
}
/// The user requires package `a` and `b`; `a` and `b` require different versions of `c`
@@ -1327,16 +1159,8 @@ fn transitive_incompatible_with_transitive() {
And because you require package-a and package-b, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "transitive_incompatible_with_transitive_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_incompatible_with_transitive_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_incompatible_with_transitive_a");
+ context.assert_not_installed("transitive_incompatible_with_transitive_b");
}
/// A local version should be included in inclusive ordered comparisons.
@@ -1378,12 +1202,7 @@ fn local_greater_than_or_equal() {
");
// The version '1.2.3+foo' satisfies the constraint '>=1.2.3'.
- assert_installed(
- &context.venv,
- "local_greater_than_or_equal_a",
- "1.2.3+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_greater_than_or_equal_a", "1.2.3+foo");
}
/// A local version should be excluded in exclusive ordered comparisons.
@@ -1419,7 +1238,7 @@ fn local_greater_than() {
╰─▶ Because only package-a==1.2.3+foo is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(&context.venv, "local_greater_than_a", &context.temp_dir);
+ context.assert_not_installed("local_greater_than_a");
}
/// A local version should be included in inclusive ordered comparisons.
@@ -1461,12 +1280,7 @@ fn local_less_than_or_equal() {
");
// The version '1.2.3+foo' satisfies the constraint '<=1.2.3'.
- assert_installed(
- &context.venv,
- "local_less_than_or_equal_a",
- "1.2.3+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_less_than_or_equal_a", "1.2.3+foo");
}
/// A local version should be excluded in exclusive ordered comparisons.
@@ -1502,7 +1316,7 @@ fn local_less_than() {
╰─▶ Because only package-a==1.2.3+foo is available and you require package-a<1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(&context.venv, "local_less_than_a", &context.temp_dir);
+ context.assert_not_installed("local_less_than_a");
}
/// Tests that we can select an older version with a local segment when newer versions are incompatible.
@@ -1546,12 +1360,7 @@ fn local_not_latest() {
+ package-a==1.2.1+foo
");
- assert_installed(
- &context.venv,
- "local_not_latest_a",
- "1.2.1+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_not_latest_a", "1.2.1+foo");
}
/// If there is a 1.2.3 version with an sdist published and no compatible wheels, then the sdist will be used.
@@ -1593,12 +1402,7 @@ fn local_not_used_with_sdist() {
");
// The version '1.2.3' with an sdist satisfies the constraint '==1.2.3'.
- assert_installed(
- &context.venv,
- "local_not_used_with_sdist_a",
- "1.2.3+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_not_used_with_sdist_a", "1.2.3+foo");
}
/// A simple version constraint should not exclude published versions with local segments.
@@ -1640,12 +1444,7 @@ fn local_simple() {
");
// The version '1.2.3+foo' satisfies the constraint '==1.2.3'.
- assert_installed(
- &context.venv,
- "local_simple_a",
- "1.2.3+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_simple_a", "1.2.3+foo");
}
/// A dependency depends on a conflicting local version of a direct dependency, but we can backtrack to a compatible version.
@@ -1701,18 +1500,8 @@ fn local_transitive_backtrack() {
");
// Backtracking to '1.0.0' gives us compatible local versions of b.
- assert_installed(
- &context.venv,
- "local_transitive_backtrack_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "local_transitive_backtrack_b",
- "2.0.0+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_transitive_backtrack_a", "1.0.0");
+ context.assert_installed("local_transitive_backtrack_b", "2.0.0+foo");
}
/// A dependency depends on a conflicting local version of a direct dependency.
@@ -1759,16 +1548,8 @@ fn local_transitive_conflicting() {
And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "local_transitive_conflicting_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "local_transitive_conflicting_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("local_transitive_conflicting_a");
+ context.assert_not_installed("local_transitive_conflicting_b");
}
/// A transitive dependency has both a non-local and local version published, but the non-local version is unusable.
@@ -1819,18 +1600,8 @@ fn local_transitive_confounding() {
");
// The version '2.0.0+foo' satisfies the constraint '==2.0.0'.
- assert_installed(
- &context.venv,
- "local_transitive_confounding_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "local_transitive_confounding_b",
- "2.0.0+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_transitive_confounding_a", "1.0.0");
+ context.assert_installed("local_transitive_confounding_b", "2.0.0+foo");
}
/// A transitive constraint on a local version should match an inclusive ordered operator.
@@ -1881,18 +1652,8 @@ fn local_transitive_greater_than_or_equal() {
");
// The version '2.0.0+foo' satisfies both >=2.0.0 and ==2.0.0+foo.
- assert_installed(
- &context.venv,
- "local_transitive_greater_than_or_equal_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "local_transitive_greater_than_or_equal_b",
- "2.0.0+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_transitive_greater_than_or_equal_a", "1.0.0");
+ context.assert_installed("local_transitive_greater_than_or_equal_b", "2.0.0+foo");
}
/// A transitive constraint on a local version should not match an exclusive ordered operator.
@@ -1939,16 +1700,8 @@ fn local_transitive_greater_than() {
And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "local_transitive_greater_than_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "local_transitive_greater_than_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("local_transitive_greater_than_a");
+ context.assert_not_installed("local_transitive_greater_than_b");
}
/// A transitive constraint on a local version should match an inclusive ordered operator.
@@ -1999,18 +1752,8 @@ fn local_transitive_less_than_or_equal() {
");
// The version '2.0.0+foo' satisfies both <=2.0.0 and ==2.0.0+foo.
- assert_installed(
- &context.venv,
- "local_transitive_less_than_or_equal_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "local_transitive_less_than_or_equal_b",
- "2.0.0+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_transitive_less_than_or_equal_a", "1.0.0");
+ context.assert_installed("local_transitive_less_than_or_equal_b", "2.0.0+foo");
}
/// A transitive constraint on a local version should not match an exclusive ordered operator.
@@ -2057,16 +1800,8 @@ fn local_transitive_less_than() {
And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "local_transitive_less_than_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "local_transitive_less_than_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("local_transitive_less_than_a");
+ context.assert_not_installed("local_transitive_less_than_b");
}
/// A simple version constraint should not exclude published versions with local segments.
@@ -2117,18 +1852,8 @@ fn local_transitive() {
");
// The version '2.0.0+foo' satisfies both ==2.0.0 and ==2.0.0+foo.
- assert_installed(
- &context.venv,
- "local_transitive_a",
- "1.0.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "local_transitive_b",
- "2.0.0+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_transitive_a", "1.0.0");
+ context.assert_installed("local_transitive_b", "2.0.0+foo");
}
/// Even if there is a 1.2.3 version published, if it is unavailable for some reason (no sdist and no compatible wheels in this case), a 1.2.3 version with a local segment should be usable instead.
@@ -2170,12 +1895,7 @@ fn local_used_without_sdist() {
");
// The version '1.2.3+foo' satisfies the constraint '==1.2.3'.
- assert_installed(
- &context.venv,
- "local_used_without_sdist_a",
- "1.2.3+foo",
- &context.temp_dir,
- );
+ context.assert_installed("local_used_without_sdist_a", "1.2.3+foo");
}
/// An equal version constraint should match a post-release version if the post-release version is available.
@@ -2216,12 +1936,7 @@ fn post_equal_available() {
");
// The version '1.2.3.post0' satisfies the constraint '==1.2.3.post0'.
- assert_installed(
- &context.venv,
- "post_equal_available_a",
- "1.2.3.post0",
- &context.temp_dir,
- );
+ context.assert_installed("post_equal_available_a", "1.2.3.post0");
}
/// An equal version constraint should not match a post-release version if the post-release version is not available.
@@ -2259,11 +1974,7 @@ fn post_equal_not_available() {
╰─▶ Because there is no version of package-a==1.2.3.post0 and you require package-a==1.2.3.post0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "post_equal_not_available_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("post_equal_not_available_a");
}
/// A greater-than-or-equal version constraint should match a post-release version if the constraint is itself a post-release version.
@@ -2305,12 +2016,7 @@ fn post_greater_than_or_equal_post() {
");
// The version '1.2.3.post1' satisfies the constraint '>=1.2.3.post0'.
- assert_installed(
- &context.venv,
- "post_greater_than_or_equal_post_a",
- "1.2.3.post1",
- &context.temp_dir,
- );
+ context.assert_installed("post_greater_than_or_equal_post_a", "1.2.3.post1");
}
/// A greater-than-or-equal version constraint should match a post-release version.
@@ -2349,12 +2055,7 @@ fn post_greater_than_or_equal() {
");
// The version '1.2.3.post1' satisfies the constraint '>=1.2.3'.
- assert_installed(
- &context.venv,
- "post_greater_than_or_equal_a",
- "1.2.3.post1",
- &context.temp_dir,
- );
+ context.assert_installed("post_greater_than_or_equal_a", "1.2.3.post1");
}
/// A greater-than version constraint should not match a post-release version if the post-release version is not available.
@@ -2394,11 +2095,7 @@ fn post_greater_than_post_not_available() {
╰─▶ Because only package-a<=1.2.3.post1 is available and you require package-a>=1.2.3.post3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "post_greater_than_post_not_available_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("post_greater_than_post_not_available_a");
}
/// A greater-than version constraint should match a post-release version if the constraint is itself a post-release version.
@@ -2439,12 +2136,7 @@ fn post_greater_than_post() {
");
// The version '1.2.3.post1' satisfies the constraint '>1.2.3.post0'.
- assert_installed(
- &context.venv,
- "post_greater_than_post_a",
- "1.2.3.post1",
- &context.temp_dir,
- );
+ context.assert_installed("post_greater_than_post_a", "1.2.3.post1");
}
/// A greater-than version constraint should not match a post-release version.
@@ -2480,7 +2172,7 @@ fn post_greater_than() {
╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(&context.venv, "post_greater_than_a", &context.temp_dir);
+ context.assert_not_installed("post_greater_than_a");
}
/// A less-than-or-equal version constraint should not match a post-release version.
@@ -2516,11 +2208,7 @@ fn post_less_than_or_equal() {
╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a<=1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "post_less_than_or_equal_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("post_less_than_or_equal_a");
}
/// A less-than version constraint should not match a post-release version.
@@ -2556,7 +2244,7 @@ fn post_less_than() {
╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a<1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(&context.venv, "post_less_than_a", &context.temp_dir);
+ context.assert_not_installed("post_less_than_a");
}
/// A greater-than version constraint should not match a post-release version with a local version identifier.
@@ -2594,11 +2282,7 @@ fn post_local_greater_than_post() {
╰─▶ Because only package-a<=1.2.3.post1+local is available and you require package-a>=1.2.3.post2, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "post_local_greater_than_post_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("post_local_greater_than_post_a");
}
/// A greater-than version constraint should not match a post-release version with a local version identifier.
@@ -2636,11 +2320,7 @@ fn post_local_greater_than() {
╰─▶ Because only package-a<=1.2.3.post1+local is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "post_local_greater_than_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("post_local_greater_than_a");
}
/// A simple version constraint should not match a post-release version.
@@ -2676,7 +2356,7 @@ fn post_simple() {
╰─▶ Because there is no version of package-a==1.2.3 and you require package-a==1.2.3, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(&context.venv, "post_simple_a", &context.temp_dir);
+ context.assert_not_installed("post_simple_a");
}
/// The user requires `a` which has multiple prereleases available with different labels.
@@ -2721,12 +2401,7 @@ fn package_multiple_prereleases_kinds() {
");
// Release candidates should be the highest precedence prerelease kind.
- assert_installed(
- &context.venv,
- "package_multiple_prereleases_kinds_a",
- "1.0.0rc1",
- &context.temp_dir,
- );
+ context.assert_installed("package_multiple_prereleases_kinds_a", "1.0.0rc1");
}
/// The user requires `a` which has multiple alphas available.
@@ -2771,12 +2446,7 @@ fn package_multiple_prereleases_numbers() {
");
// The latest alpha version should be selected.
- assert_installed(
- &context.venv,
- "package_multiple_prereleases_numbers_a",
- "1.0.0a3",
- &context.temp_dir,
- );
+ context.assert_installed("package_multiple_prereleases_numbers_a", "1.0.0a3");
}
/// The user requires a non-prerelease version of `a` which only has prerelease versions available. There are pre-releases on the boundary of their range.
@@ -2819,12 +2489,7 @@ fn package_only_prereleases_boundary() {
");
// Since there are only prerelease versions of `a` available, a prerelease is allowed. Since the user did not explicitly request a pre-release, pre-releases at the boundary should not be selected.
- assert_installed(
- &context.venv,
- "package_only_prereleases_boundary_a",
- "0.1.0a1",
- &context.temp_dir,
- );
+ context.assert_installed("package_only_prereleases_boundary_a", "0.1.0a1");
}
/// The user requires a version of package `a` which only matches prerelease versions but they did not include a prerelease specifier.
@@ -2865,11 +2530,7 @@ fn package_only_prereleases_in_range() {
");
// Since there are stable versions of `a` available, prerelease versions should not be selected without explicit opt-in.
- assert_not_installed(
- &context.venv,
- "package_only_prereleases_in_range_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("package_only_prereleases_in_range_a");
}
/// The user requires any version of package `a` which only has prerelease versions available.
@@ -2908,12 +2569,7 @@ fn package_only_prereleases() {
");
// Since there are only prerelease versions of `a` available, it should be installed even though the user did not include a prerelease specifier.
- assert_installed(
- &context.venv,
- "package_only_prereleases_a",
- "1.0.0a1",
- &context.temp_dir,
- );
+ context.assert_installed("package_only_prereleases_a", "1.0.0a1");
}
/// The user requires a version of `a` with a prerelease specifier and both prerelease and stable releases are available.
@@ -2961,12 +2617,7 @@ fn package_prerelease_specified_mixed_available() {
");
// Since the user provided a prerelease specifier, the latest prerelease version should be selected.
- assert_installed(
- &context.venv,
- "package_prerelease_specified_mixed_available_a",
- "1.0.0a1",
- &context.temp_dir,
- );
+ context.assert_installed("package_prerelease_specified_mixed_available_a", "1.0.0a1");
}
/// The user requires a version of `a` with a prerelease specifier and only stable releases are available.
@@ -3014,11 +2665,9 @@ fn package_prerelease_specified_only_final_available() {
");
// The latest stable version should be selected.
- assert_installed(
- &context.venv,
+ context.assert_installed(
"package_prerelease_specified_only_final_available_a",
"0.3.0",
- &context.temp_dir,
);
}
@@ -3067,11 +2716,9 @@ fn package_prerelease_specified_only_prerelease_available() {
");
// The latest prerelease version should be selected.
- assert_installed(
- &context.venv,
+ context.assert_installed(
"package_prerelease_specified_only_prerelease_available_a",
"0.3.0a1",
- &context.temp_dir,
);
}
@@ -3116,12 +2763,7 @@ fn package_prereleases_boundary() {
");
// Since the user did not use a pre-release specifier, pre-releases at the boundary should not be selected even though pre-releases are allowed.
- assert_installed(
- &context.venv,
- "package_prereleases_boundary_a",
- "0.1.0",
- &context.temp_dir,
- );
+ context.assert_installed("package_prereleases_boundary_a", "0.1.0");
}
/// The user requires a non-prerelease version of `a` but has enabled pre-releases. There are pre-releases on the boundary of their range.
@@ -3165,12 +2807,7 @@ fn package_prereleases_global_boundary() {
");
// Since the user did not use a pre-release specifier, pre-releases at the boundary should not be selected even though pre-releases are allowed.
- assert_installed(
- &context.venv,
- "package_prereleases_global_boundary_a",
- "0.1.0",
- &context.temp_dir,
- );
+ context.assert_installed("package_prereleases_global_boundary_a", "0.1.0");
}
/// The user requires a prerelease version of `a`. There are pre-releases on the boundary of their range.
@@ -3220,12 +2857,7 @@ fn package_prereleases_specifier_boundary() {
");
// Since the user used a pre-release specifier, pre-releases at the boundary should be selected.
- assert_installed(
- &context.venv,
- "package_prereleases_specifier_boundary_a",
- "0.2.0a1",
- &context.temp_dir,
- );
+ context.assert_installed("package_prereleases_specifier_boundary_a", "0.2.0a1");
}
/// The user requires a version of package `a` which only matches prerelease versions. They did not include a prerelease specifier for the package, but they opted into prereleases globally.
@@ -3269,11 +2901,9 @@ fn requires_package_only_prereleases_in_range_global_opt_in() {
+ package-a==1.0.0a1
");
- assert_installed(
- &context.venv,
+ context.assert_installed(
"requires_package_only_prereleases_in_range_global_opt_in_a",
"1.0.0a1",
- &context.temp_dir,
);
}
@@ -3315,12 +2945,7 @@ fn requires_package_prerelease_and_final_any() {
");
// Since the user did not provide a prerelease specifier, the older stable version should be selected.
- assert_installed(
- &context.venv,
- "requires_package_prerelease_and_final_any_a",
- "0.1.0",
- &context.temp_dir,
- );
+ context.assert_installed("requires_package_prerelease_and_final_any_a", "0.1.0");
}
/// The user requires package `a` which has a dependency on a package which only matches prerelease versions; the user has opted into allowing prereleases in `b` explicitly.
@@ -3374,17 +2999,13 @@ fn transitive_package_only_prereleases_in_range_opt_in() {
");
// Since the user included a dependency on `b` with a prerelease specifier, a prerelease version can be selected.
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_package_only_prereleases_in_range_opt_in_a",
"0.1.0",
- &context.temp_dir,
);
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_package_only_prereleases_in_range_opt_in_b",
"1.0.0a1",
- &context.temp_dir,
);
}
@@ -3432,11 +3053,7 @@ fn transitive_package_only_prereleases_in_range() {
");
// Since there are stable versions of `b` available, the prerelease version should not be selected without explicit opt-in. The available version is excluded by the range requested by the user.
- assert_not_installed(
- &context.venv,
- "transitive_package_only_prereleases_in_range_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_package_only_prereleases_in_range_a");
}
/// The user requires any version of package `a` which requires `b` which only has prerelease versions available.
@@ -3481,18 +3098,8 @@ fn transitive_package_only_prereleases() {
");
// Since there are only prerelease versions of `b` available, it should be selected even though the user did not opt-in to prereleases.
- assert_installed(
- &context.venv,
- "transitive_package_only_prereleases_a",
- "0.1.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "transitive_package_only_prereleases_b",
- "1.0.0a1",
- &context.temp_dir,
- );
+ context.assert_installed("transitive_package_only_prereleases_a", "0.1.0");
+ context.assert_installed("transitive_package_only_prereleases_b", "1.0.0a1");
}
/// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. There are many prerelease versions and some are excluded.
@@ -3605,16 +3212,10 @@ fn transitive_prerelease_and_stable_dependency_many_versions_holes() {
");
// Since the user did not explicitly opt-in to a prerelease, it cannot be selected.
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_many_versions_holes_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_many_versions_holes_b",
- &context.temp_dir,
- );
+ context
+ .assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_holes_a");
+ context
+ .assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_holes_b");
}
/// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. There are many prerelease versions.
@@ -3716,16 +3317,8 @@ fn transitive_prerelease_and_stable_dependency_many_versions() {
");
// Since the user did not explicitly opt-in to a prerelease, it cannot be selected.
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_many_versions_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_many_versions_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_a");
+ context.assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_b");
}
/// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. The user includes an opt-in to prereleases of the transitive dependency.
@@ -3788,23 +3381,17 @@ fn transitive_prerelease_and_stable_dependency_opt_in() {
");
// Since the user explicitly opted-in to a prerelease for `c`, it can be installed.
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_prerelease_and_stable_dependency_opt_in_a",
"1.0.0",
- &context.temp_dir,
);
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_prerelease_and_stable_dependency_opt_in_b",
"1.0.0",
- &context.temp_dir,
);
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_prerelease_and_stable_dependency_opt_in_c",
"2.0.0b1",
- &context.temp_dir,
);
}
@@ -3860,16 +3447,8 @@ fn transitive_prerelease_and_stable_dependency() {
");
// Since the user did not explicitly opt-in to a prerelease, it cannot be selected.
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_prerelease_and_stable_dependency_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_prerelease_and_stable_dependency_a");
+ context.assert_not_installed("transitive_prerelease_and_stable_dependency_b");
}
/// The user requires a package where recent versions require a Python version greater than the current version, but an older version is compatible.
@@ -3915,12 +3494,7 @@ fn python_greater_than_current_backtrack() {
+ package-a==1.0.0
");
- assert_installed(
- &context.venv,
- "python_greater_than_current_backtrack_a",
- "1.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("python_greater_than_current_backtrack_a", "1.0.0");
}
/// The user requires a package where recent versions require a Python version greater than the current version, but an excluded older version is compatible.
@@ -3975,11 +3549,7 @@ fn python_greater_than_current_excluded() {
And because you require package-a>=2.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "python_greater_than_current_excluded_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("python_greater_than_current_excluded_a");
}
/// The user requires a package which has many versions which all require a Python version greater than the current version
@@ -4037,11 +3607,7 @@ fn python_greater_than_current_many() {
╰─▶ Because there is no version of package-a==1.0.0 and you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "python_greater_than_current_many_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("python_greater_than_current_many_a");
}
/// The user requires a package which requires a Python version with a patch version greater than the current patch version
@@ -4079,11 +3645,7 @@ fn python_greater_than_current_patch() {
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "python_greater_than_current_patch_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("python_greater_than_current_patch_a");
}
/// The user requires a package which requires a Python version greater than the current version
@@ -4120,11 +3682,7 @@ fn python_greater_than_current() {
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "python_greater_than_current_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("python_greater_than_current_a");
}
/// The user requires a package which requires a Python version less than the current version
@@ -4199,11 +3757,7 @@ fn python_version_does_not_exist() {
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
");
- assert_not_installed(
- &context.venv,
- "python_version_does_not_exist_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("python_version_does_not_exist_a");
}
/// Both wheels and source distributions are available, and the user has disabled binaries.
@@ -4323,11 +3877,7 @@ fn no_sdist_no_wheels_with_matching_abi() {
hint: You require CPython 3.12 (`cp312`), but we only found wheels for `package-a` (v1.0.0) with the following Python ABI tag: `graalpy240_310_native`
");
- assert_not_installed(
- &context.venv,
- "no_sdist_no_wheels_with_matching_abi_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("no_sdist_no_wheels_with_matching_abi_a");
}
/// No wheels with matching platform tags are available, nor are any source distributions available
@@ -4367,11 +3917,7 @@ fn no_sdist_no_wheels_with_matching_platform() {
hint: Wheels are available for `package-a` (v1.0.0) on the following platform: `macosx_10_0_ppc64`
");
- assert_not_installed(
- &context.venv,
- "no_sdist_no_wheels_with_matching_platform_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("no_sdist_no_wheels_with_matching_platform_a");
}
/// No wheels with matching Python tags are available, nor are any source distributions available
@@ -4411,11 +3957,7 @@ fn no_sdist_no_wheels_with_matching_python() {
hint: You require CPython 3.12 (`cp312`), but we only found wheels for `package-a` (v1.0.0) with the following Python implementation tag: `graalpy310`
");
- assert_not_installed(
- &context.venv,
- "no_sdist_no_wheels_with_matching_python_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("no_sdist_no_wheels_with_matching_python_a");
}
/// No wheels are available, only source distributions but the user has disabled builds.
@@ -4456,7 +3998,7 @@ fn no_wheels_no_build() {
hint: Wheels are required for `package-a` because building from source is disabled for `package-a` (i.e., with `--no-build-package package-a`)
");
- assert_not_installed(&context.venv, "no_wheels_no_build_a", &context.temp_dir);
+ context.assert_not_installed("no_wheels_no_build_a");
}
/// No wheels with matching platform tags are available, just source distributions.
@@ -4569,7 +4111,7 @@ fn only_wheels_no_binary() {
hint: A source distribution is required for `package-a` because using pre-built wheels is disabled for `package-a` (i.e., with `--no-binary-package package-a`)
");
- assert_not_installed(&context.venv, "only_wheels_no_binary_a", &context.temp_dir);
+ context.assert_not_installed("only_wheels_no_binary_a");
}
/// No source distributions are available, only wheels.
@@ -4684,11 +4226,7 @@ fn package_only_yanked_in_range() {
");
// Since there are other versions of `a` available, yanked versions should not be selected without explicit opt-in.
- assert_not_installed(
- &context.venv,
- "package_only_yanked_in_range_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("package_only_yanked_in_range_a");
}
/// The user requires any version of package `a` which only has yanked versions available.
@@ -4726,7 +4264,7 @@ fn package_only_yanked() {
");
// Yanked versions should not be installed, even if they are the only one available.
- assert_not_installed(&context.venv, "package_only_yanked_a", &context.temp_dir);
+ context.assert_not_installed("package_only_yanked_a");
}
/// The user requires any version of `a` and both yanked and unyanked releases are available.
@@ -4772,12 +4310,7 @@ fn package_yanked_specified_mixed_available() {
");
// The latest unyanked version should be selected.
- assert_installed(
- &context.venv,
- "package_yanked_specified_mixed_available_a",
- "0.3.0",
- &context.temp_dir,
- );
+ context.assert_installed("package_yanked_specified_mixed_available_a", "0.3.0");
}
/// The user requires any version of package `a` has a yanked version available and an older unyanked version.
@@ -4818,12 +4351,7 @@ fn requires_package_yanked_and_unyanked_any() {
");
// The unyanked version should be selected.
- assert_installed(
- &context.venv,
- "requires_package_yanked_and_unyanked_any_a",
- "0.1.0",
- &context.temp_dir,
- );
+ context.assert_installed("requires_package_yanked_and_unyanked_any_a", "0.1.0");
}
/// The user requires package `a` which has a dependency on a package which only matches yanked versions; the user has opted into allowing the yanked version of `b` explicitly.
@@ -4877,18 +4405,8 @@ fn transitive_package_only_yanked_in_range_opt_in() {
"#);
// Since the user included a dependency on `b` with an exact specifier, the yanked version can be selected.
- assert_installed(
- &context.venv,
- "transitive_package_only_yanked_in_range_opt_in_a",
- "0.1.0",
- &context.temp_dir,
- );
- assert_installed(
- &context.venv,
- "transitive_package_only_yanked_in_range_opt_in_b",
- "1.0.0",
- &context.temp_dir,
- );
+ context.assert_installed("transitive_package_only_yanked_in_range_opt_in_a", "0.1.0");
+ context.assert_installed("transitive_package_only_yanked_in_range_opt_in_b", "1.0.0");
}
/// The user requires package `a` which has a dependency on a package which only matches yanked versions.
@@ -4937,11 +4455,7 @@ fn transitive_package_only_yanked_in_range() {
");
// Yanked versions should not be installed, even if they are the only valid version in a range.
- assert_not_installed(
- &context.venv,
- "transitive_package_only_yanked_in_range_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_package_only_yanked_in_range_a");
}
/// The user requires any version of package `a` which requires `b` which only has yanked versions available.
@@ -4985,11 +4499,7 @@ fn transitive_package_only_yanked() {
");
// Yanked versions should not be installed, even if they are the only one available.
- assert_not_installed(
- &context.venv,
- "transitive_package_only_yanked_a",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_package_only_yanked_a");
}
/// A transitive dependency has both a yanked and an unyanked version, but can only be satisfied by a yanked. The user includes an opt-in to the yanked version of the transitive dependency.
@@ -5052,23 +4562,17 @@ fn transitive_yanked_and_unyanked_dependency_opt_in() {
"#);
// Since the user explicitly selected the yanked version of `c`, it can be installed.
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_yanked_and_unyanked_dependency_opt_in_a",
"1.0.0",
- &context.temp_dir,
);
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_yanked_and_unyanked_dependency_opt_in_b",
"1.0.0",
- &context.temp_dir,
);
- assert_installed(
- &context.venv,
+ context.assert_installed(
"transitive_yanked_and_unyanked_dependency_opt_in_c",
"2.0.0",
- &context.temp_dir,
);
}
@@ -5122,14 +4626,6 @@ fn transitive_yanked_and_unyanked_dependency() {
");
// Since the user did not explicitly select the yanked version, it cannot be used.
- assert_not_installed(
- &context.venv,
- "transitive_yanked_and_unyanked_dependency_a",
- &context.temp_dir,
- );
- assert_not_installed(
- &context.venv,
- "transitive_yanked_and_unyanked_dependency_b",
- &context.temp_dir,
- );
+ context.assert_not_installed("transitive_yanked_and_unyanked_dependency_a");
+ context.assert_not_installed("transitive_yanked_and_unyanked_dependency_b");
}
diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs
index 32252396e..43cbc26c7 100644
--- a/crates/uv/tests/it/pip_sync.rs
+++ b/crates/uv/tests/it/pip_sync.rs
@@ -1,6 +1,4 @@
use std::env::consts::EXE_SUFFIX;
-use std::path::Path;
-use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::*;
@@ -11,24 +9,10 @@ use indoc::indoc;
use predicates::Predicate;
use url::Url;
-use crate::common::{
- TestContext, download_to_disk, site_packages_path, uv_snapshot, venv_to_interpreter,
-};
+use crate::common::{TestContext, download_to_disk, site_packages_path, uv_snapshot};
use uv_fs::{Simplified, copy_dir_all};
use uv_static::EnvVars;
-fn check_command(venv: &Path, command: &str, temp_dir: &Path) {
- Command::new(venv_to_interpreter(venv))
- // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
- // https://github.com/python/cpython/issues/75953
- .arg("-B")
- .arg("-c")
- .arg(command)
- .current_dir(temp_dir)
- .assert()
- .success();
-}
-
#[test]
fn missing_requirements_txt() {
let context = TestContext::new("3.12");
@@ -463,7 +447,13 @@ fn link() -> Result<()> {
"###
);
- check_command(&context2.venv, "import iniconfig", &context2.temp_dir);
+ context2
+ .python_command()
+ .arg("-c")
+ .arg("import iniconfig")
+ .current_dir(&context2.temp_dir)
+ .assert()
+ .success();
Ok(())
}
@@ -5221,8 +5211,8 @@ fn target_built_distribution() -> Result<()> {
context.assert_command("import iniconfig").failure();
// Ensure that we can import the package by augmenting the `PYTHONPATH`.
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-B")
+ context
+ .python_command()
.arg("-c")
.arg("import iniconfig")
.env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path())
@@ -5326,8 +5316,8 @@ fn target_source_distribution() -> Result<()> {
context.assert_command("import iniconfig").failure();
// Ensure that we can import the package by augmenting the `PYTHONPATH`.
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-B")
+ context
+ .python_command()
.arg("-c")
.arg("import iniconfig")
.env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path())
@@ -5397,8 +5387,8 @@ fn target_no_build_isolation() -> Result<()> {
context.assert_command("import wheel").failure();
// Ensure that we can import the package by augmenting the `PYTHONPATH`.
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-B")
+ context
+ .python_command()
.arg("-c")
.arg("import wheel")
.env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path())
@@ -5474,8 +5464,8 @@ fn prefix() -> Result<()> {
context.assert_command("import iniconfig").failure();
// Ensure that we can import the package by augmenting the `PYTHONPATH`.
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-B")
+ context
+ .python_command()
.arg("-c")
.arg("import iniconfig")
.env(
diff --git a/crates/uv/tests/it/pip_uninstall.rs b/crates/uv/tests/it/pip_uninstall.rs
index 3c1c0d717..5e6cbf6f9 100644
--- a/crates/uv/tests/it/pip_uninstall.rs
+++ b/crates/uv/tests/it/pip_uninstall.rs
@@ -5,7 +5,7 @@ use assert_cmd::prelude::*;
use assert_fs::fixture::ChildPath;
use assert_fs::prelude::*;
-use crate::common::{TestContext, get_bin, uv_snapshot, venv_to_interpreter};
+use crate::common::{TestContext, get_bin, uv_snapshot};
#[test]
fn no_arguments() {
@@ -113,12 +113,7 @@ fn uninstall() -> Result<()> {
.assert()
.success();
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import markupsafe")
- .current_dir(&context.temp_dir)
- .assert()
- .success();
+ context.assert_command("import markupsafe").success();
uv_snapshot!(context.pip_uninstall()
.arg("MarkupSafe"), @r###"
@@ -132,12 +127,7 @@ fn uninstall() -> Result<()> {
"###
);
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import markupsafe")
- .current_dir(&context.temp_dir)
- .assert()
- .failure();
+ context.assert_command("import markupsafe").failure();
Ok(())
}
@@ -156,12 +146,7 @@ fn missing_record() -> Result<()> {
.assert()
.success();
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import markupsafe")
- .current_dir(&context.temp_dir)
- .assert()
- .success();
+ context.assert_command("import markupsafe").success();
// Delete the RECORD file.
let dist_info = context.site_packages().join("MarkupSafe-2.1.3.dist-info");
@@ -202,11 +187,7 @@ fn uninstall_editable_by_name() -> Result<()> {
.assert()
.success();
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .success();
+ context.assert_command("import poetry_editable").success();
// Uninstall the editable by name.
uv_snapshot!(context.filters(), context.pip_uninstall()
@@ -221,11 +202,7 @@ fn uninstall_editable_by_name() -> Result<()> {
"###
);
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .failure();
+ context.assert_command("import poetry_editable").failure();
Ok(())
}
@@ -251,11 +228,7 @@ fn uninstall_by_path() -> Result<()> {
.assert()
.success();
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .success();
+ context.assert_command("import poetry_editable").success();
// Uninstall the editable by path.
uv_snapshot!(context.filters(), context.pip_uninstall()
@@ -270,11 +243,7 @@ fn uninstall_by_path() -> Result<()> {
"###
);
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .failure();
+ context.assert_command("import poetry_editable").failure();
Ok(())
}
@@ -300,11 +269,7 @@ fn uninstall_duplicate_by_path() -> Result<()> {
.assert()
.success();
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .success();
+ context.assert_command("import poetry_editable").success();
// Uninstall the editable by both path and name.
uv_snapshot!(context.filters(), context.pip_uninstall()
@@ -320,11 +285,7 @@ fn uninstall_duplicate_by_path() -> Result<()> {
"###
);
- Command::new(venv_to_interpreter(&context.venv))
- .arg("-c")
- .arg("import poetry_editable")
- .assert()
- .failure();
+ context.assert_command("import poetry_editable").failure();
Ok(())
}
diff --git a/scripts/scenarios/templates/compile.mustache b/scripts/scenarios/templates/compile.mustache
index aa6db8529..2a3202662 100644
--- a/scripts/scenarios/templates/compile.mustache
+++ b/scripts/scenarios/templates/compile.mustache
@@ -16,8 +16,8 @@ use predicates::prelude::predicate;
use uv_static::EnvVars;
use crate::common::{
- build_vendor_links_url, get_bin, packse_index_url, python_path_with_versions, uv_snapshot,
- TestContext,
+ TestContext, build_vendor_links_url, get_bin, packse_index_url, python_path_with_versions,
+ uv_snapshot,
};
/// Provision python binaries and return a `pip compile` command with options shared across all scenarios.
diff --git a/scripts/scenarios/templates/install.mustache b/scripts/scenarios/templates/install.mustache
index 15f48077e..8f1c477b2 100644
--- a/scripts/scenarios/templates/install.mustache
+++ b/scripts/scenarios/templates/install.mustache
@@ -5,52 +5,20 @@
//!
#![cfg(all(feature = "python", feature = "pypi", unix))]
-use std::path::Path;
use std::process::Command;
-use assert_cmd::assert::Assert;
-use assert_cmd::prelude::*;
-
use uv_static::EnvVars;
-use crate::common::{
- build_vendor_links_url, get_bin, packse_index_url, uv_snapshot, venv_to_interpreter,
- TestContext,
-};
-
-fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert {
- Command::new(venv_to_interpreter(venv))
- .arg("-c")
- .arg(command)
- .current_dir(temp_dir)
- .assert()
-}
-
-fn assert_installed(venv: &Path, package: &'static str, version: &'static str, temp_dir: &Path) {
- assert_command(
- venv,
- format!("import {package} as package; print(package.__version__, end='')").as_str(),
- temp_dir,
- )
- .success()
- .stdout(version);
-}
-
-fn assert_not_installed(venv: &Path, package: &'static str, temp_dir: &Path) {
- assert_command(venv, format!("import {package}").as_str(), temp_dir).failure();
-}
+use crate::common::{TestContext, build_vendor_links_url, packse_index_url, uv_snapshot};
/// Create a `pip install` command with options shared across all scenarios.
fn command(context: &TestContext) -> Command {
- let mut command = Command::new(get_bin());
+ let mut command = context.pip_install();
command
- .arg("pip")
- .arg("install")
.arg("--index-url")
.arg(packse_index_url())
.arg("--find-links")
.arg(build_vendor_links_url());
- context.add_shared_options(&mut command, true);
command.env_remove(EnvVars::UV_EXCLUDE_NEWER);
command
}
@@ -93,25 +61,20 @@ fn {{module_name}}() {
{{/resolver_options.python_platform}}
{{#root.requires}}
.arg("{{requirement}}")
- {{/root.requires}}, @r###"
- "###);
+ {{/root.requires}}, @r#"
+ "#);
{{#expected.explanation}}
// {{expected.explanation}}
{{/expected.explanation}}
{{#expected.satisfiable}}
{{#expected.packages}}
- assert_installed(
- &context.venv,
- "{{module_name}}",
- "{{version}}",
- &context.temp_dir
- );
+ context.assert_installed("{{module_name}}", "{{version}}");
{{/expected.packages}}
{{/expected.satisfiable}}
{{^expected.satisfiable}}
{{#root.requires}}
- assert_not_installed(&context.venv, "{{module_name}}", &context.temp_dir);
+ context.assert_not_installed("{{module_name}}");
{{/root.requires}}
{{/expected.satisfiable}}
}
diff --git a/scripts/scenarios/templates/lock.mustache b/scripts/scenarios/templates/lock.mustache
index 7d80b8f9b..74deb3764 100644
--- a/scripts/scenarios/templates/lock.mustache
+++ b/scripts/scenarios/templates/lock.mustache
@@ -15,7 +15,7 @@ use insta::assert_snapshot;
use uv_static::EnvVars;
-use crate::common::{packse_index_url, TestContext, uv_snapshot};
+use crate::common::{TestContext, packse_index_url, uv_snapshot};
{{#scenarios}}
From 611a13c8415dc1739ee7c33a7375e846871bc4b2 Mon Sep 17 00:00:00 2001
From: John Mumm
Date: Wed, 18 Jun 2025 10:30:12 -0400
Subject: [PATCH 017/349] Fix benchmark compilation failure: `cannot find
attribute clap in this scope` (#14128)
[Two benchmark
jobs](https://github.com/astral-sh/uv/actions/runs/15732775460/job/44337710992?pr=14126)
were failing with `error: cannot find attribute clap in this scope`
after the changes in #14120. This updates the recently added `#[clap(name = rocm...`
lines to use `cfg_attr(feature = "clap",`.
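The failure comes from building `uv-torch` without its `clap` feature: the bare attribute then has nothing to resolve against, while `cfg_attr` removes it from such builds entirely. A minimal sketch of the pattern on a hypothetical enum (not the real `TorchMode`):

```rust
// With the `clap` feature disabled, both attributes below disappear at compile
// time, so crates that don't enable the feature never encounter the unknown
// `clap` attribute that broke the benchmark jobs.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
pub enum Example {
    #[cfg_attr(feature = "clap", clap(name = "rocm6.3"))]
    Rocm63,
}
```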
---
crates/uv-torch/src/backend.rs | 32 ++++++++++++++++----------------
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/crates/uv-torch/src/backend.rs b/crates/uv-torch/src/backend.rs
index 263ea07bd..958ea3d9e 100644
--- a/crates/uv-torch/src/backend.rs
+++ b/crates/uv-torch/src/backend.rs
@@ -109,67 +109,67 @@ pub enum TorchMode {
Cu80,
/// Use the PyTorch index for ROCm 6.3.
#[serde(rename = "rocm6.3")]
- #[clap(name = "rocm6.3")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm6.3"))]
Rocm63,
/// Use the PyTorch index for ROCm 6.2.4.
#[serde(rename = "rocm6.2.4")]
- #[clap(name = "rocm6.2.4")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm6.2.4"))]
Rocm624,
/// Use the PyTorch index for ROCm 6.2.
#[serde(rename = "rocm6.2")]
- #[clap(name = "rocm6.2")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm6.2"))]
Rocm62,
/// Use the PyTorch index for ROCm 6.1.
#[serde(rename = "rocm6.1")]
- #[clap(name = "rocm6.1")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm6.1"))]
Rocm61,
/// Use the PyTorch index for ROCm 6.0.
#[serde(rename = "rocm6.0")]
- #[clap(name = "rocm6.0")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm6.0"))]
Rocm60,
/// Use the PyTorch index for ROCm 5.7.
#[serde(rename = "rocm5.7")]
- #[clap(name = "rocm5.7")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.7"))]
Rocm57,
/// Use the PyTorch index for ROCm 5.6.
#[serde(rename = "rocm5.6")]
- #[clap(name = "rocm5.6")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.6"))]
Rocm56,
/// Use the PyTorch index for ROCm 5.5.
#[serde(rename = "rocm5.5")]
- #[clap(name = "rocm5.5")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.5"))]
Rocm55,
/// Use the PyTorch index for ROCm 5.4.2.
#[serde(rename = "rocm5.4.2")]
- #[clap(name = "rocm5.4.2")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.4.2"))]
Rocm542,
/// Use the PyTorch index for ROCm 5.4.
#[serde(rename = "rocm5.4")]
- #[clap(name = "rocm5.4")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.4"))]
Rocm54,
/// Use the PyTorch index for ROCm 5.3.
#[serde(rename = "rocm5.3")]
- #[clap(name = "rocm5.3")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.3"))]
Rocm53,
/// Use the PyTorch index for ROCm 5.2.
#[serde(rename = "rocm5.2")]
- #[clap(name = "rocm5.2")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.2"))]
Rocm52,
/// Use the PyTorch index for ROCm 5.1.1.
#[serde(rename = "rocm5.1.1")]
- #[clap(name = "rocm5.1.1")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm5.1.1"))]
Rocm511,
/// Use the PyTorch index for ROCm 4.2.
#[serde(rename = "rocm4.2")]
- #[clap(name = "rocm4.2")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm4.2"))]
Rocm42,
/// Use the PyTorch index for ROCm 4.1.
#[serde(rename = "rocm4.1")]
- #[clap(name = "rocm4.1")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm4.1"))]
Rocm41,
/// Use the PyTorch index for ROCm 4.0.1.
#[serde(rename = "rocm4.0.1")]
- #[clap(name = "rocm4.0.1")]
+ #[cfg_attr(feature = "clap", clap(name = "rocm4.0.1"))]
Rocm401,
}
From 75d4cd30d6bbb13e2d871d614abe2065c46e9b4a Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 18 Jun 2025 09:55:09 -0500
Subject: [PATCH 018/349] Use Depot for Windows `cargo test` (#14122)
Replaces https://github.com/astral-sh/uv/pull/12320
Switches to Depot for the large Windows runner we use for `cargo test`.
The runtime goes from 8m 20s -> 6m 44s (total) and 7m 18s -> 4m 41s
(test run) which are 20% and 35% speedups respectively.
A few things got marginally slower, like Python installs went from 11s
-> 38s, the Rust cache went from 15s -> 30s, and drive setup went from
7s -> 20s.
---
.github/workflows/ci.yml | 2 +-
.github/workflows/setup-dev-drive.ps1 | 45 ++++++++++++++++++++++-----
2 files changed, 38 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index feaa38210..459495600 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -266,7 +266,7 @@ jobs:
timeout-minutes: 15
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
- runs-on: github-windows-2025-x86_64-16
+ runs-on: depot-windows-2022-16
name: "cargo test | windows"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
diff --git a/.github/workflows/setup-dev-drive.ps1 b/.github/workflows/setup-dev-drive.ps1
index e0e2a765b..e003cc359 100644
--- a/.github/workflows/setup-dev-drive.ps1
+++ b/.github/workflows/setup-dev-drive.ps1
@@ -1,13 +1,43 @@
# Configures a drive for testing in CI.
+#
+# When using standard GitHub Actions runners, a `D:` drive is present and has
+# similar or better performance characteristics than a ReFS dev drive. Sometimes
+# using a larger runner is still more performant (e.g., when running the test
+# suite) and we need to create a dev drive. This script automatically configures
+# the appropriate drive.
+#
+# When using GitHub Actions' "larger runners", the `D:` drive is not present and
+# we create a DevDrive mount on `C:`. This is purported to be more performant
+# than a ReFS drive, though we did not see a change when we switched over.
+#
+# When using Depot runners, the underlying infrastructure is EC2, which does not
+# support Hyper-V. The `New-VHD` cmdlet only works with Hyper-V, but we can
+# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
+# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
+# so we must check if it's a Depot runner first, and we use `V:` as the target
+# instead.
-# When not using a GitHub Actions "larger runner", the `D:` drive is present and
-# has similar or better performance characteristics than a ReFS dev drive.
-# Sometimes using a larger runner is still more performant (e.g., when running
-# the test suite) and we need to create a dev drive. This script automatically
-# configures the appropriate drive.
-# Note we use `Get-PSDrive` is not sufficient because the drive letter is assigned.
-if (Test-Path "D:\") {
+if ($env:DEPOT_RUNNER -eq "1") {
+ Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."
+
+ # Create VHD and configure drive using diskpart
+ $vhdPath = "C:\uv_dev_drive.vhdx"
+ @"
+create vdisk file="$vhdPath" maximum=20480 type=expandable
+attach vdisk
+create partition primary
+active
+assign letter=V
+"@ | diskpart
+
+ # Format the drive as ReFS
+ format V: /fs:ReFS /q /y
+ $Drive = "V:"
+
+ Write-Output "Custom dev drive created at $Drive"
+} elseif (Test-Path "D:\") {
+ # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
Write-Output "Using existing drive at D:"
$Drive = "D:"
} else {
@@ -61,4 +91,3 @@ Write-Output `
"UV_WORKSPACE=$($Drive)/uv" `
"PATH=$($Drive)/.cargo/bin;$env:PATH" `
>> $env:GITHUB_ENV
-
From 1fc65a1d9de51b2dd567733208f4595dc442db89 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 18 Jun 2025 11:30:37 -0500
Subject: [PATCH 019/349] Publish to DockerHub (#14088)
The primary motivation here is to avoid confusion with non-official
repositories, e.g., https://github.com/astral-sh/uv/issues/13958 which
could lead to attacks against our users.
Resolves
- https://github.com/astral-sh/uv/issues/12679
- #8699
---
.github/workflows/build-docker.yml | 34 ++++++++++++++++++------------
1 file changed, 21 insertions(+), 13 deletions(-)
diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml
index e310add68..4f19fb3df 100644
--- a/.github/workflows/build-docker.yml
+++ b/.github/workflows/build-docker.yml
@@ -37,7 +37,8 @@ on:
- .github/workflows/build-docker.yml
env:
- UV_BASE_IMG: ghcr.io/${{ github.repository_owner }}/uv
+ UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
+ UV_DOCKERHUB_IMAGE: docker.io/astral/uv
jobs:
docker-plan:
@@ -84,13 +85,12 @@ jobs:
with:
submodules: recursive
- # Login to DockerHub first, to avoid rate-limiting
+ # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
- # PRs from forks don't have access to secrets, disable this step in that case.
if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
with:
- username: astralshbot
- password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
+ username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+ password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
@@ -117,7 +117,9 @@ jobs:
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
- images: ${{ env.UV_BASE_IMG }}
+ images: |
+ ${{ env.UV_GHCR_IMAGE }}
+ ${{ env.UV_DOCKERHUB_IMAGE }}
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
tags: |
type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }}
@@ -186,12 +188,12 @@ jobs:
- python:3.9-slim-bookworm,python3.9-bookworm-slim
- python:3.8-slim-bookworm,python3.8-bookworm-slim
steps:
- # Login to DockerHub first, to avoid rate-limiting
+ # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
with:
- username: astralshbot
- password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
+ username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+ password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
@@ -212,7 +214,7 @@ jobs:
# Generate Dockerfile content
cat <<EOF > Dockerfile
FROM ${BASE_IMAGE}
- COPY --from=${{ env.UV_BASE_IMG }}:latest /uv /uvx /usr/local/bin/
+ COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
ENTRYPOINT []
CMD ["/usr/local/bin/uv"]
EOF
@@ -245,7 +247,9 @@ jobs:
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:
- images: ${{ env.UV_BASE_IMG }}
+ images: |
+ ${{ env.UV_GHCR_IMAGE }}
+ ${{ env.UV_DOCKERHUB_IMAGE }}
flavor: |
latest=false
tags: |
@@ -266,7 +270,7 @@ jobs:
- name: Generate artifact attestation
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
- subject-name: ${{ env.UV_BASE_IMG }}
+ subject-name: ${{ env.UV_GHCR_IMAGE }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
# Re-tag the base image, to ensure it's shown as the newest on the registry UI
@@ -289,12 +293,16 @@ jobs:
- name: Push tags
env:
- IMAGE: ${{ env.UV_BASE_IMG }}
+ IMAGE: ${{ env.UV_GHCR_IMAGE }}
DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }}
TAGS: ${{ needs.docker-publish-base.outputs.image-tags }}
run: |
docker pull "${IMAGE}@${DIGEST}"
for tag in $TAGS; do
+ # Skip re-tag for DockerHub
+ if [[ "$tag" == "${{ env.UV_DOCKERHUB_IMAGE }}"* ]]; then
+ continue
+ fi
docker tag "${IMAGE}@${DIGEST}" "${tag}"
docker push "${tag}"
done
From e1046242e7449dabd6af61df26187e1848c3ec55 Mon Sep 17 00:00:00 2001
From: Zanie Blue
Date: Wed, 18 Jun 2025 13:46:30 -0500
Subject: [PATCH 020/349] Fix Docker attestations (#14133)
These regressed in #14088 and were found during my test publish from a
fork.
---
.github/workflows/build-docker.yml | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml
index 4f19fb3df..1f5229aef 100644
--- a/.github/workflows/build-docker.yml
+++ b/.github/workflows/build-docker.yml
@@ -73,8 +73,9 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
- id-token: write # for Depot OIDC
- packages: write # for GHCR
+ id-token: write # for Depot OIDC and GHCR signing
+ packages: write # for GHCR image pushes
+ attestations: write # for GHCR attestations
environment:
name: release
outputs:
@@ -141,7 +142,7 @@ jobs:
if: ${{ needs.docker-plan.outputs.push == 'true' }}
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
with:
- subject-name: ${{ env.UV_BASE_IMG }}
+ subject-name: ${{ env.UV_GHCR_IMAGE }}
subject-digest: ${{ steps.build.outputs.digest }}
docker-publish-extra:
@@ -154,9 +155,9 @@ jobs:
- docker-publish-base
if: ${{ needs.docker-plan.outputs.push == 'true' }}
permissions:
- packages: write
- attestations: write # needed to push image attestations to the Github attestation store
- id-token: write # needed for signing the images with GitHub OIDC Token
+ id-token: write # for Depot OIDC and GHCR signing
+ packages: write # for GHCR image pushes
+ attestations: write # for GHCR attestations
strategy:
fail-fast: false
matrix:
From c3e4b6380608a0425982f3082a5403e7249a7532 Mon Sep 17 00:00:00 2001
From: Jack O'Connor
Date: Wed, 18 Jun 2025 12:12:56 -0700
Subject: [PATCH 021/349] document the way member sources shadow workspace
sources
Closes https://github.com/astral-sh/uv/issues/14093.
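For illustration, with hypothetical member and index names (see the note added to the docs below), a member-level entry shadows the workspace root's entry for the same package even when its marker is platform-limited:

```toml
# packages/example-member/pyproject.toml (hypothetical)
# The workspace root also defines `tool.uv.sources.tqdm` (say, a Git source).
# This member-level entry replaces it entirely for this member, even on
# platforms where the marker below does not match.
[tool.uv.sources]
tqdm = { index = "pytorch", marker = "sys_platform == 'linux'" }

[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
```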
---
docs/concepts/projects/workspaces.md | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/concepts/projects/workspaces.md b/docs/concepts/projects/workspaces.md
index 942cea8c2..4b2d670b4 100644
--- a/docs/concepts/projects/workspaces.md
+++ b/docs/concepts/projects/workspaces.md
@@ -113,6 +113,13 @@ build-backend = "hatchling.build"
Every workspace member would, by default, install `tqdm` from GitHub, unless a specific member
overrides the `tqdm` entry in its own `tool.uv.sources` table.
+!!! note
+
+ If a workspace member provides `tool.uv.sources` for some dependency, it will ignore any
+ `tool.uv.sources` for the same dependency in the workspace root, even if the member's source is
+ limited by a [marker](dependencies.md#platform-specific-sources) that doesn't match the current
+ platform.
+
## Workspace layouts
The most common workspace layout can be thought of as a root project with a series of accompanying
From cc8d5a92154717f5c6baccb21b45cc1cd0dccca1 Mon Sep 17 00:00:00 2001
From: Jack O'Connor
Date: Thu, 19 Jun 2025 14:47:22 -0700
Subject: [PATCH 022/349] handle an existing shebang in `uv init --script`
(#14141)
Closes https://github.com/astral-sh/uv/issues/14085.
---
Cargo.lock | 2 ++
crates/uv-scripts/Cargo.toml | 2 ++
crates/uv-scripts/src/lib.rs | 23 +++++++++++---
crates/uv-warnings/src/lib.rs | 8 ++---
crates/uv/tests/it/init.rs | 59 +++++++++++++++++++++++++++++++++++
docs/guides/scripts.md | 2 ++
6 files changed, 88 insertions(+), 8 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 264a17e55..77a17a4f3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5742,6 +5742,7 @@ dependencies = [
"fs-err 3.1.1",
"indoc",
"memchr",
+ "regex",
"serde",
"thiserror 2.0.12",
"toml",
@@ -5751,6 +5752,7 @@ dependencies = [
"uv-pypi-types",
"uv-redacted",
"uv-settings",
+ "uv-warnings",
"uv-workspace",
]
diff --git a/crates/uv-scripts/Cargo.toml b/crates/uv-scripts/Cargo.toml
index 993633918..124eb1fea 100644
--- a/crates/uv-scripts/Cargo.toml
+++ b/crates/uv-scripts/Cargo.toml
@@ -16,11 +16,13 @@ uv-pep508 = { workspace = true }
uv-pypi-types = { workspace = true }
uv-redacted = { workspace = true }
uv-settings = { workspace = true }
+uv-warnings = { workspace = true }
uv-workspace = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
indoc = { workspace = true }
memchr = { workspace = true }
+regex = { workspace = true }
serde = { workspace = true, features = ["derive"] }
thiserror = { workspace = true }
toml = { workspace = true }
diff --git a/crates/uv-scripts/src/lib.rs b/crates/uv-scripts/src/lib.rs
index 1023b4141..b80cdc219 100644
--- a/crates/uv-scripts/src/lib.rs
+++ b/crates/uv-scripts/src/lib.rs
@@ -14,6 +14,7 @@ use uv_pep508::PackageName;
use uv_pypi_types::VerbatimParsedUrl;
use uv_redacted::DisplaySafeUrl;
use uv_settings::{GlobalOptions, ResolverInstallerOptions};
+use uv_warnings::warn_user;
use uv_workspace::pyproject::Sources;
static FINDER: LazyLock<Finder> = LazyLock::new(|| Finder::new(b"# /// script"));
@@ -238,11 +239,25 @@ impl Pep723Script {
let metadata = serialize_metadata(&default_metadata);
let script = if let Some(existing_contents) = existing_contents {
+ let (mut shebang, contents) = extract_shebang(&existing_contents)?;
+ if !shebang.is_empty() {
+ shebang.push_str("\n#\n");
+ // If the shebang doesn't contain `uv`, it's probably something like
+ // `#! /usr/bin/env python`, which isn't going to respect the inline metadata.
+ // Issue a warning for users who might not know that.
+ // TODO: There are a lot of mistakes we could consider detecting here, like
+ // `uv run` without `--script` when the file doesn't end in `.py`.
+ if !regex::Regex::new(r"\buv\b").unwrap().is_match(&shebang) {
+ warn_user!(
+ "If you execute {} directly, it might ignore its inline metadata.\nConsider replacing its shebang with: {}",
+ file.to_string_lossy().cyan(),
+ "#!/usr/bin/env -S uv run --script".cyan(),
+ );
+ }
+ }
indoc::formatdoc! {r"
- {metadata}
- {content}
- ",
- content = String::from_utf8(existing_contents).map_err(|err| Pep723Error::Utf8(err.utf8_error()))?}
+ {shebang}{metadata}
+ {contents}" }
} else {
indoc::formatdoc! {r#"
{metadata}
diff --git a/crates/uv-warnings/src/lib.rs b/crates/uv-warnings/src/lib.rs
index 9ed4c646e..5f2287cac 100644
--- a/crates/uv-warnings/src/lib.rs
+++ b/crates/uv-warnings/src/lib.rs
@@ -24,7 +24,7 @@ pub fn disable() {
/// Warn a user, if warnings are enabled.
#[macro_export]
macro_rules! warn_user {
- ($($arg:tt)*) => {
+ ($($arg:tt)*) => {{
use $crate::anstream::eprintln;
use $crate::owo_colors::OwoColorize;
@@ -33,7 +33,7 @@ macro_rules! warn_user {
let formatted = message.bold();
eprintln!("{}{} {formatted}", "warning".yellow().bold(), ":".bold());
}
- };
+ }};
}
pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::default);
@@ -42,7 +42,7 @@ pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::d
/// message.
#[macro_export]
macro_rules! warn_user_once {
- ($($arg:tt)*) => {
+ ($($arg:tt)*) => {{
use $crate::anstream::eprintln;
use $crate::owo_colors::OwoColorize;
@@ -54,5 +54,5 @@ macro_rules! warn_user_once {
}
}
}
- };
+ }};
}
diff --git a/crates/uv/tests/it/init.rs b/crates/uv/tests/it/init.rs
index e9e5e54a7..c5993d670 100644
--- a/crates/uv/tests/it/init.rs
+++ b/crates/uv/tests/it/init.rs
@@ -929,6 +929,65 @@ fn init_script_file_conflicts() -> Result<()> {
Ok(())
}
+// Init script should not trash an existing shebang.
+#[test]
+fn init_script_shebang() -> Result<()> {
+ let context = TestContext::new("3.12");
+
+ let script_path = context.temp_dir.child("script.py");
+
+ let contents = "#! /usr/bin/env python3\nprint(\"Hello, world!\")";
+ fs_err::write(&script_path, contents)?;
+ uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ warning: If you execute script.py directly, it might ignore its inline metadata.
+ Consider replacing its shebang with: #!/usr/bin/env -S uv run --script
+ Initialized script at `script.py`
+ ");
+ let resulting_script = fs_err::read_to_string(&script_path)?;
+ assert_snapshot!(resulting_script, @r#"
+ #! /usr/bin/env python3
+ #
+ # /// script
+ # requires-python = ">=3.12"
+ # dependencies = []
+ # ///
+
+ print("Hello, world!")
+ "#
+ );
+
+ // If the shebang already contains `uv`, the result is the same, but we suppress the warning.
+ let contents = "#!/usr/bin/env -S uv run --script\nprint(\"Hello, world!\")";
+ fs_err::write(&script_path, contents)?;
+ uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r"
+ success: true
+ exit_code: 0
+ ----- stdout -----
+
+ ----- stderr -----
+ Initialized script at `script.py`
+ ");
+ let resulting_script = fs_err::read_to_string(&script_path)?;
+ assert_snapshot!(resulting_script, @r#"
+ #!/usr/bin/env -S uv run --script
+ #
+ # /// script
+ # requires-python = ">=3.12"
+ # dependencies = []
+ # ///
+
+ print("Hello, world!")
+ "#
+ );
+
+ Ok(())
+}
+
/// Run `uv init --lib` with an existing py.typed file
#[test]
fn init_py_typed_exists() -> Result<()> {
diff --git a/docs/guides/scripts.md b/docs/guides/scripts.md
index 7142db155..26d85e76d 100644
--- a/docs/guides/scripts.md
+++ b/docs/guides/scripts.md
@@ -241,10 +241,12 @@ Declaration of dependencies is also supported in this context, for example:
```python title="example"
#!/usr/bin/env -S uv run --script
+#
# /// script
# requires-python = ">=3.12"
# dependencies = ["httpx"]
# ///
+
import httpx
print(httpx.get("https://example.com"))
From 62365d4ec86c146fc1d2050bff3fa70e53abfa08 Mon Sep 17 00:00:00 2001
From: John Mumm
Date: Fri, 20 Jun 2025 03:21:32 -0400
Subject: [PATCH 023/349] Support netrc and same-origin credential propagation
on index redirects (#14126)
This PR is a combination of #12920 and #13754. Prior to these changes,
following a redirect when searching indexes would bypass our
authentication middleware. This PR updates uv to support propagating
credentials through our middleware on same-origin redirects and to
support netrc credentials for both same- and cross-origin redirects. It
does not handle the case described in #11097 where the redirect location
itself includes credentials (e.g.,
`https://user:pass@redirect-location.com`). That will be addressed in
follow-up work.
This includes unit tests for the new redirect logic and integration
tests for credential propagation. The automated external registries test
is also passing for AWS CodeArtifact, Azure Artifacts, GCP Artifact
Registry, JFrog Artifactory, GitLab, Cloudsmith, and Gemfury.
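At the heart of the same-origin rule is a comparison of scheme, host, and port between the original request and the redirect target; only on a match are the original credentials carried forward. A rough sketch of that decision (illustrative only, not the exact code in this patch):

```rust
use url::Url;

// Same-origin means scheme, host, and port all match; only then is it safe to
// carry the original Authorization header to the redirect location.
fn is_same_origin(original: &Url, redirect: &Url) -> bool {
    original.scheme() == redirect.scheme()
        && original.host_str() == redirect.host_str()
        && original.port_or_known_default() == redirect.port_or_known_default()
}

// Cross-origin redirects drop the header; netrc credentials for the new host
// are instead resolved by the auth middleware when the redirected request is
// sent back through the pipeline.
fn keep_authorization(original: &Url, redirect: &Url) -> bool {
    is_same_origin(original, redirect)
}
```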
---
Cargo.lock | 1 +
crates/uv-client/Cargo.toml | 1 +
crates/uv-client/src/base_client.rs | 574 ++++++++++++++++++++++-
crates/uv-client/src/cached_client.rs | 2 -
crates/uv-client/src/lib.rs | 2 +-
crates/uv-client/src/registry_client.rs | 243 +++++++++-
crates/uv-distribution/src/source/mod.rs | 7 +-
crates/uv-git/src/resolver.rs | 4 +-
crates/uv-publish/src/lib.rs | 27 +-
crates/uv/tests/it/common/mod.rs | 4 +-
crates/uv/tests/it/edit.rs | 191 ++++++++
11 files changed, 1022 insertions(+), 34 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 77a17a4f3..a30a0cbe1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4946,6 +4946,7 @@ dependencies = [
"uv-torch",
"uv-version",
"uv-warnings",
+ "wiremock",
]
[[package]]
diff --git a/crates/uv-client/Cargo.toml b/crates/uv-client/Cargo.toml
index 81d1909fe..bc7fc611f 100644
--- a/crates/uv-client/Cargo.toml
+++ b/crates/uv-client/Cargo.toml
@@ -65,3 +65,4 @@ hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio"] }
insta = { version = "1.40.0", features = ["filters", "json", "redactions"] }
tokio = { workspace = true }
+wiremock = { workspace = true }
diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs
index f5fda246d..85c384b0d 100644
--- a/crates/uv-client/src/base_client.rs
+++ b/crates/uv-client/src/base_client.rs
@@ -6,14 +6,23 @@ use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};
+use anyhow::anyhow;
+use http::{
+ HeaderMap, HeaderName, HeaderValue, Method, StatusCode,
+ header::{
+ AUTHORIZATION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, COOKIE, LOCATION,
+ PROXY_AUTHORIZATION, REFERER, TRANSFER_ENCODING, WWW_AUTHENTICATE,
+ },
+};
use itertools::Itertools;
-use reqwest::{Client, ClientBuilder, Proxy, Response};
+use reqwest::{Client, ClientBuilder, IntoUrl, Proxy, Request, Response, multipart};
use reqwest_middleware::{ClientWithMiddleware, Middleware};
use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{
DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy,
};
use tracing::{debug, trace};
+use url::ParseError;
use url::Url;
use uv_auth::{AuthMiddleware, Indexes};
@@ -32,6 +41,10 @@ use crate::middleware::OfflineMiddleware;
use crate::tls::read_identity;
pub const DEFAULT_RETRIES: u32 = 3;
+/// Maximum number of redirects to follow before giving up.
+///
+/// This is the default used by [`reqwest`].
+const DEFAULT_MAX_REDIRECTS: u32 = 10;
/// Selectively skip parts or the entire auth middleware.
#[derive(Debug, Clone, Copy, Default)]
@@ -61,6 +74,31 @@ pub struct BaseClientBuilder<'a> {
default_timeout: Duration,
extra_middleware: Option,
proxies: Vec,
+ redirect_policy: RedirectPolicy,
+ /// Whether credentials should be propagated during cross-origin redirects.
+ ///
+ /// A policy allowing propagation is insecure and should only be available for test code.
+ cross_origin_credential_policy: CrossOriginCredentialsPolicy,
+}
+
+/// The policy for handling HTTP redirects.
+#[derive(Debug, Default, Clone, Copy)]
+pub enum RedirectPolicy {
+ /// Use reqwest's built-in redirect handling. This bypasses our custom middleware
+ /// on redirect.
+ #[default]
+ BypassMiddleware,
+ /// Handle redirects manually, re-triggering our custom middleware for each request.
+ RetriggerMiddleware,
+}
+
+impl RedirectPolicy {
+ pub fn reqwest_policy(self) -> reqwest::redirect::Policy {
+ match self {
+ RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(),
+ RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(),
+ }
+ }
}
/// A list of user-defined middlewares to be applied to the client.
@@ -96,6 +134,8 @@ impl BaseClientBuilder<'_> {
default_timeout: Duration::from_secs(30),
extra_middleware: None,
proxies: vec![],
+ redirect_policy: RedirectPolicy::default(),
+ cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
}
}
}
@@ -173,6 +213,24 @@ impl<'a> BaseClientBuilder<'a> {
self
}
+ #[must_use]
+ pub fn redirect(mut self, policy: RedirectPolicy) -> Self {
+ self.redirect_policy = policy;
+ self
+ }
+
+ /// Allows credentials to be propagated on cross-origin redirects.
+ ///
+ /// WARNING: This should only be available for tests. In production code, propagating credentials
+ /// during cross-origin redirects can lead to security vulnerabilities including credential
+ /// leakage to untrusted domains.
+ #[cfg(test)]
+ #[must_use]
+ pub fn allow_cross_origin_credentials(mut self) -> Self {
+ self.cross_origin_credential_policy = CrossOriginCredentialsPolicy::Insecure;
+ self
+ }
+
pub fn is_offline(&self) -> bool {
matches!(self.connectivity, Connectivity::Offline)
}
@@ -229,6 +287,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Secure,
+ self.redirect_policy,
);
// Create an insecure client that accepts invalid certificates.
@@ -237,11 +296,20 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Insecure,
+ self.redirect_policy,
);
// Wrap in any relevant middleware and handle connectivity.
- let client = self.apply_middleware(raw_client.clone());
- let dangerous_client = self.apply_middleware(raw_dangerous_client.clone());
+ let client = RedirectClientWithMiddleware {
+ client: self.apply_middleware(raw_client.clone()),
+ redirect_policy: self.redirect_policy,
+ cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
+ let dangerous_client = RedirectClientWithMiddleware {
+ client: self.apply_middleware(raw_dangerous_client.clone()),
+ redirect_policy: self.redirect_policy,
+ cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
BaseClient {
connectivity: self.connectivity,
@@ -258,8 +326,16 @@ impl<'a> BaseClientBuilder<'a> {
/// Share the underlying client between two different middleware configurations.
pub fn wrap_existing(&self, existing: &BaseClient) -> BaseClient {
// Wrap in any relevant middleware and handle connectivity.
- let client = self.apply_middleware(existing.raw_client.clone());
- let dangerous_client = self.apply_middleware(existing.raw_dangerous_client.clone());
+ let client = RedirectClientWithMiddleware {
+ client: self.apply_middleware(existing.raw_client.clone()),
+ redirect_policy: self.redirect_policy,
+ cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
+ let dangerous_client = RedirectClientWithMiddleware {
+ client: self.apply_middleware(existing.raw_dangerous_client.clone()),
+ redirect_policy: self.redirect_policy,
+ cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
BaseClient {
connectivity: self.connectivity,
@@ -279,6 +355,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout: Duration,
ssl_cert_file_exists: bool,
security: Security,
+ redirect_policy: RedirectPolicy,
) -> Client {
// Configure the builder.
let client_builder = ClientBuilder::new()
@@ -286,7 +363,8 @@ impl<'a> BaseClientBuilder<'a> {
.user_agent(user_agent)
.pool_max_idle_per_host(20)
.read_timeout(timeout)
- .tls_built_in_root_certs(false);
+ .tls_built_in_root_certs(false)
+ .redirect(redirect_policy.reqwest_policy());
// If necessary, accept invalid certificates.
let client_builder = match security {
@@ -381,9 +459,9 @@ impl<'a> BaseClientBuilder<'a> {
#[derive(Debug, Clone)]
pub struct BaseClient {
/// The underlying HTTP client that enforces valid certificates.
- client: ClientWithMiddleware,
+ client: RedirectClientWithMiddleware,
/// The underlying HTTP client that accepts invalid certificates.
- dangerous_client: ClientWithMiddleware,
+ dangerous_client: RedirectClientWithMiddleware,
/// The HTTP client without middleware.
raw_client: Client,
/// The HTTP client that accepts invalid certificates without middleware.
@@ -408,7 +486,7 @@ enum Security {
impl BaseClient {
/// Selects the appropriate client based on the host's trustworthiness.
- pub fn for_host(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
+ pub fn for_host(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
if self.disable_ssl(url) {
&self.dangerous_client
} else {
@@ -416,6 +494,12 @@ impl BaseClient {
}
}
+ /// Executes a request, applying redirect policy.
+ pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+ let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
+ client.execute(req).await
+ }
+
/// Returns `true` if the host is trusted to use the insecure client.
pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool {
self.allow_insecure_host
@@ -439,6 +523,316 @@ impl BaseClient {
}
}
+/// Wrapper around [`ClientWithMiddleware`] that manages redirects.
+#[derive(Debug, Clone)]
+pub struct RedirectClientWithMiddleware {
+ client: ClientWithMiddleware,
+ redirect_policy: RedirectPolicy,
+ /// Whether credentials should be preserved during cross-origin redirects.
+ ///
+ /// WARNING: This should only be available for tests. In production code, preserving credentials
+ /// during cross-origin redirects can lead to security vulnerabilities including credential
+ /// leakage to untrusted domains.
+ cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+}
+
+impl RedirectClientWithMiddleware {
+ /// Convenience method to make a `GET` request to a URL.
+ pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+ RequestBuilder::new(self.client.get(url), self)
+ }
+
+ /// Convenience method to make a `POST` request to a URL.
+ pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+ RequestBuilder::new(self.client.post(url), self)
+ }
+
+ /// Convenience method to make a `HEAD` request to a URL.
+ pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+ RequestBuilder::new(self.client.head(url), self)
+ }
+
+ /// Executes a request, applying the redirect policy.
+ pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+ match self.redirect_policy {
+ RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
+ RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
+ }
+ }
+
+ /// Executes a request. If the response is a redirect (one of HTTP 301, 302, 303, 307, or 308), the
+ /// request is executed again with the redirect location URL (up to a maximum number of
+ /// redirects).
+ ///
+ /// Unlike the built-in reqwest redirect policies, this sends the redirect request through the
+ /// entire middleware pipeline again.
+ ///
+ /// See RFC 7231 7.1.2 for details on
+ /// redirect semantics.
+ async fn execute_with_redirect_handling(
+ &self,
+ req: Request,
+ ) -> reqwest_middleware::Result<Response> {
+ let mut request = req;
+ let mut redirects = 0;
+ let max_redirects = DEFAULT_MAX_REDIRECTS;
+
+ loop {
+ let result = self
+ .client
+ .execute(request.try_clone().expect("HTTP request must be cloneable"))
+ .await;
+ let Ok(response) = result else {
+ return result;
+ };
+
+ if redirects >= max_redirects {
+ return Ok(response);
+ }
+
+ let Some(redirect_request) =
+ request_into_redirect(request, &response, self.cross_origin_credentials_policy)?
+ else {
+ return Ok(response);
+ };
+
+ redirects += 1;
+ request = redirect_request;
+ }
+ }
+
+ pub fn raw_client(&self) -> &ClientWithMiddleware {
+ &self.client
+ }
+}
+
+impl From<RedirectClientWithMiddleware> for ClientWithMiddleware {
+ fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware {
+ item.client
+ }
+}
+
+/// Check if this should be a redirect and, if so, return a new redirect request.
+///
+/// This implementation is based on the [`reqwest`] crate redirect implementation.
+/// It takes ownership of the original [`Request`] and mutates it to create the new
+/// redirect [`Request`].
+fn request_into_redirect(
+ mut req: Request,
+ res: &Response,
+ cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+) -> reqwest_middleware::Result