Collect path dependency indexes separately

John Mumm 2025-07-24 14:13:00 +02:00
parent 9e5c16d833
commit ec0b75bcfc
4 changed files with 139 additions and 36 deletions
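
In rough terms, the change stops folding path-dependency indexes into the `IndexLocations` used for lockfile validation and instead threads them through as a separate `BTreeSet<UrlString>`; a locked registry URL is then accepted if it appears in either set. Below is a minimal sketch of that membership check, with `UrlString` simplified to a plain `String`, the surrounding lock machinery omitted, and the `is_unknown_registry` helper being purely illustrative rather than part of the diff:

```rust
use std::collections::BTreeSet;

/// Illustrative stand-in for the crate's `UrlString` type.
type UrlString = String;

/// Hypothetical helper: a locked registry URL is only flagged as unknown if it
/// appears in neither the configured indexes nor the indexes declared by path
/// dependencies. `None` for `remotes` means "no indexes configured at all",
/// in which case the lockfile is reused as-is.
fn is_unknown_registry(
    url: &UrlString,
    remotes: Option<&BTreeSet<UrlString>>,
    path_dependency_indexes: &BTreeSet<UrlString>,
) -> bool {
    remotes.is_some_and(|remotes| {
        !remotes.contains(url) && !path_dependency_indexes.contains(url)
    })
}

fn main() {
    let remotes: BTreeSet<UrlString> =
        BTreeSet::from(["https://pypi.org/simple".to_string()]);
    let path_indexes: BTreeSet<UrlString> =
        BTreeSet::from(["https://pypi-proxy.fly.dev/simple".to_string()]);

    // Accepted: declared by a path dependency, even though it is not part of
    // the top-level index configuration.
    assert!(!is_unknown_registry(
        &"https://pypi-proxy.fly.dev/simple".to_string(),
        Some(&remotes),
        &path_indexes,
    ));

    // Rejected: unknown to both sets.
    assert!(is_unknown_registry(
        &"https://example.com/simple".to_string(),
        Some(&remotes),
        &path_indexes,
    ));
}
```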

View File

@@ -1233,7 +1233,8 @@ impl Lock {
         build_constraints: &[Requirement],
         dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
         dependency_metadata: &DependencyMetadata,
-        indexes: Option<Cow<'_, IndexLocations>>,
+        indexes: Option<&IndexLocations>,
+        path_dependency_indexes: &BTreeSet<UrlString>,
         tags: &Tags,
         hasher: &HashStrategy,
         index: &InMemoryIndex,
@@ -1399,7 +1400,7 @@
         }

         // Collect the set of available indexes (both `--index-url` and `--find-links` entries).
-        let remotes = indexes.as_ref().map(|locations| {
+        let remotes = indexes.map(|locations| {
             locations
                 .allowed_indexes()
                 .into_iter()
@@ -1412,7 +1413,7 @@
                 .collect::<BTreeSet<_>>()
         });

-        let locals = indexes.as_ref().map(|locations| {
+        let locals = indexes.map(|locations| {
             locations
                 .allowed_indexes()
                 .into_iter()
@@ -1452,10 +1453,9 @@
             if let Source::Registry(index) = &package.id.source {
                 match index {
                     RegistrySource::Url(url) => {
-                        if remotes
-                            .as_ref()
-                            .is_some_and(|remotes| !remotes.contains(url))
-                        {
+                        if remotes.as_ref().is_some_and(|remotes| {
+                            !remotes.contains(url) && !path_dependency_indexes.contains(url)
+                        }) {
                             let name = &package.id.name;
                             let version = &package
                                 .id

View File

@@ -1,6 +1,7 @@
 //! Resolve the current [`ProjectWorkspace`] or [`Workspace`].

-use std::collections::{BTreeMap, BTreeSet};
+use std::borrow::Cow;
+use std::collections::{BTreeMap, BTreeSet, VecDeque};
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};

@@ -939,10 +940,13 @@ impl Workspace {
         // We will only add indexes if we have not already seen the URLs.
         let known_urls: FxHashSet<_> = self.indexes.iter().map(Index::url).collect();

-        let mut pyprojects = std::collections::VecDeque::new();
-        pyprojects.push_back((self.install_path.clone(), self.pyproject_toml.clone()));
+        let mut pyproject_queue = VecDeque::new();
+        for package in self.packages.values() {
+            pyproject_queue
+                .push_back((package.root.clone(), Cow::Borrowed(&package.pyproject_toml)));
+        }

-        while let Some((base_path, pyproject)) = pyprojects.pop_front() {
+        while let Some((base_path, pyproject)) = pyproject_queue.pop_front() {
             if let Some(tool_uv_sources) = pyproject
                 .tool
                 .as_ref()
@@ -975,8 +979,8 @@
             let dep_pyproject_path = canonical_path.join("pyproject.toml");

             match pyproject_toml_from_path(dep_pyproject_path.clone()) {
-                Ok(dep_pyproject) => {
-                    if let Some(dep_indexes) = dep_pyproject
+                Ok(pyproject_toml) => {
+                    if let Some(dep_indexes) = pyproject_toml
                         .tool
                         .as_ref()
                         .and_then(|tool| tool.uv.as_ref())
@@ -990,7 +994,8 @@
                         );
                     }

-                    pyprojects.push_back((canonical_path, dep_pyproject));
+                    pyproject_queue
+                        .push_back((canonical_path, Cow::Owned(pyproject_toml)));
                 }
                 Err(e) => {
                     debug!(
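
The traversal above seeds a queue with the workspace members' already-parsed manifests and then pushes the manifests of path dependencies as it discovers them on disk; using `Cow` lets the borrowed member manifests and the freshly loaded owned ones share a single queue without cloning the former. Below is a stripped-down sketch of that `VecDeque` + `Cow` pattern, with a hypothetical `Manifest` type standing in for `PyProjectToml` and the on-disk parsing replaced by a stub:

```rust
use std::borrow::Cow;
use std::collections::VecDeque;
use std::path::PathBuf;

/// Placeholder for the parsed `pyproject.toml` type.
#[derive(Clone)]
struct Manifest {
    /// Paths of path dependencies declared by this manifest.
    path_dependencies: Vec<PathBuf>,
}

fn collect(members: &[(PathBuf, Manifest)]) {
    // Seed the queue with borrowed workspace-member manifests; manifests
    // discovered later are pushed as owned values, so both kinds coexist.
    let mut queue: VecDeque<(PathBuf, Cow<'_, Manifest>)> = members
        .iter()
        .map(|(root, manifest)| (root.clone(), Cow::Borrowed(manifest)))
        .collect();

    while let Some((base_path, manifest)) = queue.pop_front() {
        for dep in &manifest.path_dependencies {
            let dep_root = base_path.join(dep);
            // The real code would parse `dep_root/pyproject.toml` here; this
            // sketch just fabricates an empty manifest and takes ownership.
            let dep_manifest = Manifest { path_dependencies: Vec::new() };
            queue.push_back((dep_root, Cow::Owned(dep_manifest)));
        }
    }
}

fn main() {
    let members = vec![(
        PathBuf::from("member"),
        Manifest { path_dependencies: vec![PathBuf::from("../pkg_a")] },
    )];
    collect(&members);
}
```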

View File

@@ -1,6 +1,5 @@
 #![allow(clippy::single_match_else)]

-use std::borrow::Cow;
 use std::collections::{BTreeMap, BTreeSet};
 use std::fmt::Write;
 use std::path::Path;
@@ -18,8 +17,8 @@ use uv_configuration::{
 use uv_dispatch::BuildDispatch;
 use uv_distribution::DistributionDatabase;
 use uv_distribution_types::{
-    DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification,
-    Requirement, UnresolvedRequirementSpecification,
+    DependencyMetadata, HashGeneration, Index, IndexLocations, IndexUrl,
+    NameRequirementSpecification, Requirement, UnresolvedRequirementSpecification, UrlString,
 };
 use uv_git::ResolvedRepositoryReference;
 use uv_normalize::{GroupName, PackageName};
@@ -1067,26 +1066,26 @@ impl ValidatedLock {
         // However, if _no_ indexes were provided, we assume that the user wants to reuse the existing
         // distributions, even though a failure to reuse the lockfile will result in re-resolving
         // against PyPI by default.
-        let validation_indexes = if index_locations.is_none() {
+        let indexes = if index_locations.is_none() {
             None
         } else {
-            // If indexes were defined as sources in path dependencies, add them to the
-            // index locations to use for validation.
-            if let LockTarget::Workspace(workspace) = target {
-                let path_dependency_source_indexes =
-                    workspace.collect_path_dependency_source_indexes();
-                if path_dependency_source_indexes.is_empty() {
-                    Some(Cow::Borrowed(index_locations))
-                } else {
-                    Some(Cow::Owned(index_locations.clone().combine(
-                        path_dependency_source_indexes,
-                        Vec::new(),
-                        false,
-                    )))
-                }
-            } else {
-                Some(Cow::Borrowed(index_locations))
-            }
+            Some(index_locations)
         };
+
+        // Collect indexes specified in path dependencies
+        let path_dependency_indexes = if let LockTarget::Workspace(workspace) = target {
+            workspace
+                .collect_path_dependency_source_indexes()
+                .into_iter()
+                .filter_map(|index| match index.url() {
+                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
+                        Some(UrlString::from(index.url().without_credentials().as_ref()))
+                    }
+                    IndexUrl::Path(_) => None,
+                })
+                .collect::<BTreeSet<_>>()
+        } else {
+            BTreeSet::default()
+        };

         // Determine whether the lockfile satisfies the workspace requirements.
@@ -1101,7 +1100,8 @@
             build_constraints,
             dependency_groups,
             dependency_metadata,
-            validation_indexes,
+            indexes,
+            &path_dependency_indexes,
             interpreter.tags()?,
             hasher,
             index,

View File

@@ -27634,6 +27634,104 @@ fn lock_path_dependency_explicit_index() -> Result<()> {
     Ok(())
 }

+/// Test that lockfile validation includes explicit indexes from path dependencies
+/// defined in a non-root workspace member.
+#[test]
+fn lock_path_dependency_explicit_index_workspace_member() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    // Create the path dependency with explicit index
+    let pkg_a = context.temp_dir.child("pkg_a");
+    fs_err::create_dir_all(&pkg_a)?;
+    let pyproject_toml = pkg_a.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "pkg-a"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = ["iniconfig"]
+
+        [tool.uv.sources]
+        iniconfig = { index = "inner-index" }
+
+        [[tool.uv.index]]
+        name = "inner-index"
+        url = "https://pypi-proxy.fly.dev/simple"
+        explicit = true
+        "#,
+    )?;
+
+    // Create a project that depends on pkg_a
+    let member = context.temp_dir.child("member");
+    fs_err::create_dir_all(&member)?;
+    let pyproject_toml = member.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "member"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = ["pkg-a"]
+
+        [tool.uv.sources]
+        pkg-a = { path = "../pkg_a/", editable = true }
+        black = { index = "middle-index" }
+
+        [[tool.uv.index]]
+        name = "middle-index"
+        url = "https://middle-index.com/simple"
+        explicit = true
+        "#,
+    )?;
+
+    // Create a root with workspace member
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "root-project"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = ["member"]
+
+        [tool.uv.workspace]
+        members = ["member"]
+
+        [tool.uv.sources]
+        member = { workspace = true }
+        anyio = { index = "outer-index" }
+
+        [[tool.uv.index]]
+        name = "outer-index"
+        url = "https://outer-index.com/simple"
+        explicit = true
+        "#,
+    )?;
+
+    uv_snapshot!(context.filters(), context.lock(), @r"
+    success: true
+    exit_code: 0
+
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 4 packages in [TIME]
+    ");
+
+    uv_snapshot!(context.filters(), context.lock().arg("--check"), @r"
+    success: true
+    exit_code: 0
+
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 4 packages in [TIME]
+    ");
+
+    Ok(())
+}
+
 /// Test that lockfile validation works correctly when path dependency has
 /// both explicit and non-explicit indexes.
 #[test]