mirror of https://github.com/astral-sh/uv
Skip metadata fetch for `--no-deps` and `pip sync` (#7127)
## Summary

I think a better tradeoff here is to skip fetching metadata, even though we then can't validate the extras. It helps in situations like https://github.com/astral-sh/uv/issues/5073#issuecomment-2334235588, where we'd otherwise have to download the wheels twice.
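As a rough, self-contained sketch of the tradeoff described above (simplified stand-in types, not uv's actual API): when metadata is skipped for `--no-deps` / `pip sync`, it is simply absent, so an unknown extra can no longer be flagged.

```rust
// Hypothetical, simplified sketch of the tradeoff described in the summary. When
// dependency resolution is skipped (`--no-deps` / `pip sync`), metadata may never be
// fetched, so extras can no longer be validated. Names here are illustrative only.

#[derive(Debug)]
struct Metadata {
    provides_extras: Vec<String>,
}

#[derive(Debug)]
enum Diagnostic {
    MissingExtra { package: String, extra: String },
}

/// Validate a requested extra only if metadata was actually fetched.
fn validate_extra(
    package: &str,
    extra: &str,
    metadata: Option<&Metadata>,
    diagnostics: &mut Vec<Diagnostic>,
) {
    if let Some(metadata) = metadata {
        if !metadata.provides_extras.iter().any(|e| e == extra) {
            diagnostics.push(Diagnostic::MissingExtra {
                package: package.to_string(),
                extra: extra.to_string(),
            });
        }
    }
    // With `--no-deps` / `pip sync`, `metadata` is `None` and the extra is silently accepted.
}

fn main() {
    let mut diagnostics = Vec::new();
    let metadata = Metadata {
        provides_extras: vec!["dotenv".to_string()],
    };

    // Metadata available: an unknown extra is reported.
    validate_extra("flask", "empty", Some(&metadata), &mut diagnostics);
    // Metadata skipped: nothing to validate, no warning.
    validate_extra("flask", "empty", None, &mut diagnostics);

    println!("{diagnostics:?}"); // exactly one MissingExtra diagnostic
}
```

This mirrors the behavior change covered by the `no_deps_invalid_extra` test further down, where the `MissingExtra` warning is no longer emitted.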
This commit is contained in:
parent e96eb946f9
commit 7d49fbc753
```diff
@@ -1459,6 +1459,8 @@ impl Package {
             } else {
                 annotated_dist
                     .metadata
+                    .as_ref()
+                    .expect("metadata is present")
                     .requires_dist
                     .iter()
                     .cloned()
```
```diff
@@ -1471,6 +1473,8 @@ impl Package {
             } else {
                 annotated_dist
                     .metadata
+                    .as_ref()
+                    .expect("metadata is present")
                     .dev_dependencies
                     .iter()
                     .map(|(group, requirements)| {
```
```diff
@@ -2168,8 +2172,8 @@ impl PackageId {
         annotated_dist: &AnnotatedDist,
         root: &Path,
     ) -> Result<PackageId, LockError> {
-        let name = annotated_dist.metadata.name.clone();
-        let version = annotated_dist.metadata.version.clone();
+        let name = annotated_dist.name.clone();
+        let version = annotated_dist.version.clone();
         let source = Source::from_resolved_dist(&annotated_dist.dist, root)?;
         Ok(Self {
             name,
```
```diff
@@ -300,29 +300,32 @@ impl ResolutionGraph {
                 git,
             )?;

-            // Validate the extra.
-            if let Some(extra) = extra {
-                if !metadata.provides_extras.contains(extra) {
-                    diagnostics.push(ResolutionDiagnostic::MissingExtra {
-                        dist: dist.clone(),
-                        extra: extra.clone(),
-                    });
-                }
-            }
-
-            // Validate the development dependency group.
-            if let Some(dev) = dev {
-                if !metadata.dev_dependencies.contains_key(dev) {
-                    diagnostics.push(ResolutionDiagnostic::MissingDev {
-                        dist: dist.clone(),
-                        dev: dev.clone(),
-                    });
-                }
-            }
+            if let Some(metadata) = metadata.as_ref() {
+                // Validate the extra.
+                if let Some(extra) = extra {
+                    if !metadata.provides_extras.contains(extra) {
+                        diagnostics.push(ResolutionDiagnostic::MissingExtra {
+                            dist: dist.clone(),
+                            extra: extra.clone(),
+                        });
+                    }
+                }
+
+                // Validate the development dependency group.
+                if let Some(dev) = dev {
+                    if !metadata.dev_dependencies.contains_key(dev) {
+                        diagnostics.push(ResolutionDiagnostic::MissingDev {
+                            dist: dist.clone(),
+                            dev: dev.clone(),
+                        });
+                    }
+                }
+            }

             // Add the distribution to the graph.
             let index = petgraph.add_node(ResolutionGraphNode::Dist(AnnotatedDist {
                 dist,
                 name: name.clone(),
                 version: version.clone(),
                 extra: extra.clone(),
                 dev: dev.clone(),
```
```diff
@@ -351,7 +354,7 @@ impl ResolutionGraph {
         preferences: &Preferences,
         index: &InMemoryIndex,
         git: &GitResolver,
-    ) -> Result<(ResolvedDist, Vec<HashDigest>, Metadata), ResolveError> {
+    ) -> Result<(ResolvedDist, Vec<HashDigest>, Option<Metadata>), ResolveError> {
         Ok(if let Some(url) = url {
             // Create the distribution.
             let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?;
```
```diff
@@ -365,17 +368,17 @@ impl ResolutionGraph {
             // Extract the metadata.
             let metadata = {
                 let response = index.distributions().get(&version_id).unwrap_or_else(|| {
-                    panic!("Every package should have metadata: {version_id:?}")
+                    panic!("Every URL distribution should have metadata: {version_id:?}")
                 });

                 let MetadataResponse::Found(archive) = &*response else {
-                    panic!("Every package should have metadata: {version_id:?}")
+                    panic!("Every URL distribution should have metadata: {version_id:?}")
                 };

                 archive.metadata.clone()
             };

-            (dist.into(), hashes, metadata)
+            (dist.into(), hashes, Some(metadata))
         } else {
             let dist = pins
                 .get(name, version)
```
```diff
@@ -406,15 +409,13 @@ impl ResolutionGraph {

             // Extract the metadata.
             let metadata = {
-                let response = index.distributions().get(&version_id).unwrap_or_else(|| {
-                    panic!("Every package should have metadata: {version_id:?}")
-                });
-
-                let MetadataResponse::Found(archive) = &*response else {
-                    panic!("Every package should have metadata: {version_id:?}")
-                };
-
-                archive.metadata.clone()
+                index.distributions().get(&version_id).and_then(|response| {
+                    if let MetadataResponse::Found(archive) = &*response {
+                        Some(archive.metadata.clone())
+                    } else {
+                        None
+                    }
+                })
             };

             (dist, hashes, metadata)
```
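The hunk above replaces a pair of panics with a lookup that simply yields `None` when the metadata was never fetched. A minimal standalone sketch of that `Option::and_then` pattern, using stand-in types rather than uv's `InMemoryIndex`/`MetadataResponse`:

```rust
// Stand-in types: turn a cache lookup that may be missing or unusable into an
// `Option<Metadata>` with `and_then`, instead of panicking.

use std::collections::HashMap;

#[derive(Debug, Clone)]
struct Metadata {
    requires_dist: Vec<String>,
}

enum MetadataResponse {
    Found(Metadata),
    Unavailable,
}

fn lookup(index: &HashMap<String, MetadataResponse>, version_id: &str) -> Option<Metadata> {
    index.get(version_id).and_then(|response| {
        if let MetadataResponse::Found(metadata) = response {
            Some(metadata.clone())
        } else {
            None
        }
    })
}

fn main() {
    let mut index = HashMap::new();
    index.insert(
        "anyio-4.3.0".to_string(),
        MetadataResponse::Found(Metadata {
            requires_dist: vec!["idna>=2.8".to_string()],
        }),
    );
    index.insert("sniffio-1.3.1".to_string(), MetadataResponse::Unavailable);

    let found = lookup(&index, "anyio-4.3.0").expect("metadata is present");
    assert_eq!(found.requires_dist, vec!["idna>=2.8".to_string()]);
    assert!(lookup(&index, "sniffio-1.3.1").is_none()); // response exists but is unusable
    assert!(lookup(&index, "missing-0.0.0").is_none()); // never fetched (e.g. `--no-deps`)
    println!("metadata lookup degrades to None instead of panicking");
}
```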
```diff
@@ -726,13 +727,17 @@ fn has_lower_bound(
         return true;
     }

+    let Some(metadata) = neighbor_dist.metadata.as_ref() else {
+        // We can't check for lower bounds if we lack metadata.
+        return true;
+    };
+
     // Get all individual specifier for the current package and check if any has a lower
     // bound.
-    for requirement in neighbor_dist
-        .metadata
+    for requirement in metadata
         .requires_dist
         .iter()
-        .chain(neighbor_dist.metadata.dev_dependencies.values().flatten())
+        .chain(metadata.dev_dependencies.values().flatten())
     {
         if requirement.name != *package_name {
             continue;
```
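Similarly, `has_lower_bound` now bails out early when metadata is absent. A small sketch of that `let ... else` early-return, again with simplified types rather than uv's:

```rust
// Simplified stand-ins: if metadata was never fetched, a lower-bound check
// conservatively succeeds instead of inspecting requirements it doesn't have.

struct Metadata {
    requires_dist: Vec<(String, Option<String>)>, // (package name, lower bound)
}

struct Dist {
    metadata: Option<Metadata>,
}

fn has_lower_bound(dist: &Dist, package_name: &str) -> bool {
    let Some(metadata) = dist.metadata.as_ref() else {
        // We can't check for lower bounds if we lack metadata; assume the best.
        return true;
    };
    metadata
        .requires_dist
        .iter()
        .any(|(name, lower)| name == package_name && lower.is_some())
}

fn main() {
    let with_metadata = Dist {
        metadata: Some(Metadata {
            requires_dist: vec![("idna".to_string(), Some(">=2.8".to_string()))],
        }),
    };
    let without_metadata = Dist { metadata: None };

    assert!(has_lower_bound(&with_metadata, "idna"));
    assert!(!has_lower_bound(&with_metadata, "sniffio"));
    assert!(has_lower_bound(&without_metadata, "anything")); // metadata was skipped
    println!("ok");
}
```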
```diff
@@ -24,11 +24,12 @@ mod requirements_txt;
 #[derive(Debug, Clone)]
 pub(crate) struct AnnotatedDist {
     pub(crate) dist: ResolvedDist,
     pub(crate) name: PackageName,
     pub(crate) version: Version,
     pub(crate) extra: Option<ExtraName>,
     pub(crate) dev: Option<GroupName>,
     pub(crate) hashes: Vec<HashDigest>,
-    pub(crate) metadata: Metadata,
+    pub(crate) metadata: Option<Metadata>,
 }

 impl AnnotatedDist {
```
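With `metadata` now an `Option`, callers fall into two camps: paths that only run when metadata was fetched assert on it (`.expect("metadata is present")`, as in the lock hunks near the top of the diff), while paths that must tolerate `--no-deps` / `pip sync` degrade gracefully. A hypothetical side-by-side of the two styles, with simplified stand-in types:

```rust
// Two consumption styles for an optional metadata field: assert where the invariant
// holds, fall back where it may legitimately be missing. Types are illustrative only.

#[derive(Debug)]
struct Metadata {
    requires_dist: Vec<String>,
}

struct AnnotatedDist {
    name: String,
    metadata: Option<Metadata>,
}

// Path that is only reached when metadata was fetched: assert and use it.
fn requires_dist(dist: &AnnotatedDist) -> &[String] {
    &dist
        .metadata
        .as_ref()
        .expect("metadata is present")
        .requires_dist
}

// Path that must tolerate `--no-deps` / `pip sync`: degrade to an empty list.
fn requires_dist_or_empty(dist: &AnnotatedDist) -> &[String] {
    dist.metadata
        .as_ref()
        .map(|metadata| metadata.requires_dist.as_slice())
        .unwrap_or(&[])
}

fn main() {
    let fetched = AnnotatedDist {
        name: "anyio".to_string(),
        metadata: Some(Metadata {
            requires_dist: vec!["idna>=2.8".to_string()],
        }),
    };
    let skipped = AnnotatedDist {
        name: "anyio".to_string(),
        metadata: None,
    };

    assert_eq!(requires_dist(&fetched).len(), 1);
    assert!(requires_dist_or_empty(&skipped).is_empty());
    println!("{} handled with and without metadata", skipped.name);
}
```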
```diff
@@ -331,12 +331,14 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackages>
             }

             // Pre-visit all candidate packages, to allow metadata to be fetched in parallel.
-            Self::pre_visit(
-                state.pubgrub.partial_solution.prioritized_packages(),
-                &self.urls,
-                &state.python_requirement,
-                &request_sink,
-            )?;
+            if self.dependency_mode.is_transitive() {
+                Self::pre_visit(
+                    state.pubgrub.partial_solution.prioritized_packages(),
+                    &self.urls,
+                    &state.python_requirement,
+                    &request_sink,
+                )?;
+            }

             // Choose a package version.
             let Some(highest_priority_pkg) = state
```
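The resolver hunks gate metadata prefetching and registration on the dependency mode. A trimmed-down illustration of that gate; the `DependencyMode` enum mirrors uv's in spirit, but everything else here is illustrative only:

```rust
// When dependencies are not being resolved (`--no-deps`), candidate packages are not
// pre-visited, so no metadata requests are queued at all.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DependencyMode {
    /// Resolve transitive dependencies (the default).
    Transitive,
    /// Resolve only the requirements given on the command line (`--no-deps`).
    Direct,
}

impl DependencyMode {
    fn is_transitive(self) -> bool {
        matches!(self, DependencyMode::Transitive)
    }
    fn is_direct(self) -> bool {
        matches!(self, DependencyMode::Direct)
    }
}

fn prefetch_metadata(mode: DependencyMode, candidates: &[&str]) -> Vec<String> {
    let mut requests = Vec::new();
    if mode.is_transitive() {
        // Only in transitive mode do we fan out metadata requests for candidates.
        for candidate in candidates {
            requests.push(format!("metadata request: {candidate}"));
        }
    }
    requests
}

fn main() {
    let candidates = ["flask-3.0.2", "werkzeug-3.0.1"];
    assert_eq!(prefetch_metadata(DependencyMode::Transitive, &candidates).len(), 2);
    assert!(prefetch_metadata(DependencyMode::Direct, &candidates).is_empty());
    assert!(DependencyMode::Direct.is_direct());
    println!("metadata prefetch is skipped under --no-deps");
}
```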
```diff
@@ -1109,17 +1111,19 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackages>

         // Emit a request to fetch the metadata for this version.
         if matches!(&**package, PubGrubPackageInner::Package { .. }) {
-            if self.index.distributions().register(candidate.version_id()) {
-                // Verify that the package is allowed under the hash-checking policy.
-                if !self
-                    .hasher
-                    .allows_package(candidate.name(), candidate.version())
-                {
-                    return Err(ResolveError::UnhashedPackage(candidate.name().clone()));
-                }
-
-                let request = Request::from(dist.for_resolution());
-                request_sink.blocking_send(request)?;
+            if self.dependency_mode.is_transitive() {
+                if self.index.distributions().register(candidate.version_id()) {
+                    // Verify that the package is allowed under the hash-checking policy.
+                    if !self
+                        .hasher
+                        .allows_package(candidate.name(), candidate.version())
+                    {
+                        return Err(ResolveError::UnhashedPackage(candidate.name().clone()));
+                    }
+
+                    let request = Request::from(dist.for_resolution());
+                    request_sink.blocking_send(request)?;
+                }
             }
         }
```
```diff
@@ -1186,6 +1190,11 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackages>
                 dev,
                 marker,
             } => {
+                // If we're excluding transitive dependencies, short-circuit.
+                if self.dependency_mode.is_direct() {
+                    return Ok(Dependencies::Unforkable(Vec::default()));
+                }
+
                 // Determine the distribution to lookup.
                 let dist = match url {
                     Some(url) => PubGrubDistribution::from_url(name, url),
```
```diff
@@ -1273,12 +1282,6 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackages>
                     }
                 };

-                // If we're excluding transitive dependencies, short-circuit. (It's important that
-                // we fetched the metadata, though, since we need it to validate extras.)
-                if self.dependency_mode.is_direct() {
-                    return Ok(Dependencies::Unforkable(Vec::default()));
-                }
-
                 let requirements = self.flatten_requirements(
                     &metadata.requires_dist,
                     &metadata.dev_dependencies,
```
```diff
@@ -85,7 +85,7 @@ fn clean_package_pypi() -> Result<()> {
     ----- stderr -----
     DEBUG uv [VERSION] ([COMMIT] DATE)
     DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
-    Removed 13 files for iniconfig ([SIZE])
+    Removed 12 files for iniconfig ([SIZE])
     "###);

     // Assert that the `.rkyv` file is removed for `iniconfig`.
```
```diff
@@ -158,7 +158,7 @@ fn clean_package_index() -> Result<()> {
     ----- stderr -----
     DEBUG uv [VERSION] ([COMMIT] DATE)
     DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
-    Removed 13 files for iniconfig ([SIZE])
+    Removed 12 files for iniconfig ([SIZE])
     "###);

     // Assert that the `.rkyv` file is removed for `iniconfig`.
```
```diff
@@ -204,7 +204,7 @@ fn prune_unzipped() -> Result<()> {

     ----- stderr -----
     Pruning cache at: [CACHE_DIR]/
-    Removed 163 files ([SIZE])
+    Removed 162 files ([SIZE])
     "###);

     // Reinstalling the source distribution should not require re-downloading the source
```
```diff
@@ -226,28 +226,20 @@ fn prune_unzipped() -> Result<()> {
     ~ source-distribution==0.0.1
     "###);

     // But reinstalling the other package should require a download, since we pruned the wheel.
     requirements_txt.write_str(indoc! { r"
         iniconfig
     " })?;
     uv_snapshot!(context.filters(), context.pip_sync().env_remove("UV_EXCLUDE_NEWER").arg("requirements.txt").arg("--reinstall").arg("--offline"), @r###"
     success: false
-    exit_code: 1
+    exit_code: 2
     ----- stdout -----

     ----- stderr -----
-      × No solution found when resolving dependencies:
-      ╰─▶ Because only the following versions of iniconfig are available:
-              iniconfig<=0.1
-              iniconfig>=1.0.0
-          and all of:
-              iniconfig<=0.1
-              iniconfig>=1.0.0
-          need to be downloaded from a registry, we can conclude that iniconfig<1.0.0 cannot be used.
-          And because you require iniconfig, we can conclude that your requirements are unsatisfiable.
-
-          hint: Pre-releases are available for iniconfig in the requested range (e.g., 0.2.dev0), but pre-releases weren't enabled (try: `--prerelease=allow`)
-
-          hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
+    Resolved 1 package in [TIME]
+    error: Failed to prepare distributions
+      Caused by: Failed to fetch wheel: iniconfig==2.0.0
+      Caused by: Network connectivity is disabled, but the requested data wasn't found in the cache for: `https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
     "###);

     Ok(())
```
```diff
@@ -6151,7 +6151,7 @@ fn no_deps_valid_extra() -> Result<()> {
     Ok(())
 }

-/// Resolve a package with `--no-deps`, including an invalid extra.
+/// Resolve a package with `--no-deps`, including an invalid extra. We don't warn here.
 #[test]
 fn no_deps_invalid_extra() -> Result<()> {
     let context = TestContext::new("3.12");
```
```diff
@@ -6171,7 +6171,6 @@ fn no_deps_invalid_extra() -> Result<()> {

     ----- stderr -----
     Resolved 1 package in [TIME]
-    warning: The package `flask==3.0.2` does not have an extra named `empty`
     "###
     );

```
```diff
@@ -4783,7 +4783,9 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> {
     ----- stdout -----

     ----- stderr -----
-    error: Failed to download and build `example-a-961b4c22==1.0.0`
+    Resolved 1 package in [TIME]
+    error: Failed to prepare distributions
+      Caused by: Failed to fetch wheel: example-a-961b4c22==1.0.0
       Caused by: Hash mismatch for `example-a-961b4c22==1.0.0`

           Expected:
```
```diff
@@ -4991,7 +4993,9 @@ fn require_hashes_registry_invalid_hash() -> Result<()> {
     ----- stdout -----

     ----- stderr -----
-    error: Failed to download and build `example-a-961b4c22==1.0.0`
+    Resolved 1 package in [TIME]
+    error: Failed to prepare distributions
+      Caused by: Failed to fetch wheel: example-a-961b4c22==1.0.0
       Caused by: Hash mismatch for `example-a-961b4c22==1.0.0`

           Expected:
```
```diff
@@ -5472,7 +5476,9 @@ fn incompatible_build_constraint() -> Result<()> {
     ----- stdout -----

     ----- stderr -----
-    error: Failed to download and build `requests==1.2.0`
+    Resolved 1 package in [TIME]
+    error: Failed to prepare distributions
+      Caused by: Failed to fetch wheel: requests==1.2.0
       Caused by: Failed to install requirements from `setup.py` build (resolve)
       Caused by: No solution found when resolving: setuptools>=40.8.0
       Caused by: Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable.
```