mirror of https://github.com/astral-sh/uv
Centralize up-to-date checking for path installations (#2168)
## Summary

Internal-only refactor to consolidate the multiple codepaths we use for checking whether a cached or installed entry is up-to-date with a local requirement.
This commit is contained in:
parent ffb69e3f48
commit 6f94dc3c77
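The hunks below introduce a single `ArchiveTimestamp::up_to_date_with(source, target)` helper in `uv_cache` and migrate each call site to it. As orientation, here is a minimal, self-contained sketch of the freshness rule that helper centralizes, reduced to plain `std::fs` calls; the function names and paths are illustrative stand-ins, not uv's actual API:

```rust
use std::fs;
use std::io;
use std::path::Path;
use std::time::SystemTime;

/// Last-modified time of a file or directory (a stand-in for uv's `Timestamp`).
fn modified_at(path: &Path) -> io::Result<SystemTime> {
    fs::metadata(path)?.modified()
}

/// A target (an installed or cached artifact) counts as up-to-date when it was
/// created at or after the source was last modified -- the same
/// `modified_at <= created_at` comparison performed by the new helper.
fn up_to_date_with(source: &Path, target: &Path) -> io::Result<bool> {
    Ok(modified_at(source)? <= modified_at(target)?)
}

fn main() -> io::Result<()> {
    // Hypothetical paths, purely for illustration.
    let source = Path::new("./my-project/pyproject.toml");
    let target =
        Path::new("./.venv/lib/python3.12/site-packages/my_project-0.1.0.dist-info/METADATA");
    println!("up to date: {}", up_to_date_with(source, target)?);
    Ok(())
}
```

The real helper additionally treats a source whose modification time cannot be determined as not up-to-date, and picks the creation marker based on the kind of target; the sketch collapses both sides to a plain mtime lookup.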
@@ -6,6 +6,7 @@ use std::ops::Deref;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
+use distribution_types::InstalledDist;
 use fs_err as fs;
 use tempfile::{tempdir, TempDir};

@@ -692,6 +693,36 @@ impl ArchiveTimestamp {
             Self::Approximate(timestamp) => *timestamp,
         }
     }
+
+    /// Returns `true` if the `target` (an installed or cached distribution) is up-to-date with the
+    /// source archive (`source`).
+    ///
+    /// The `target` should be an installed package in a virtual environment, or an unzipped
+    /// package in the cache.
+    ///
+    /// The `source` is a source archive, i.e., a path to a built wheel or a Python package directory.
+    pub fn up_to_date_with(source: &Path, target: ArchiveTarget) -> Result<bool, io::Error> {
+        let Some(modified_at) = Self::from_path(source)? else {
+            // If there's no entrypoint, we can't determine the modification time, so we assume that the
+            // target is not up-to-date.
+            return Ok(false);
+        };
+        let created_at = match target {
+            ArchiveTarget::Install(installed) => {
+                Timestamp::from_path(installed.path().join("METADATA"))?
+            }
+            ArchiveTarget::Cache(cache) => Timestamp::from_path(cache)?,
+        };
+        Ok(modified_at.timestamp() <= created_at)
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum ArchiveTarget<'a> {
+    /// The target is an installed package in a virtual environment.
+    Install(&'a InstalledDist),
+    /// The target is an unzipped package in the cache.
+    Cache(&'a Path),
 }
 
 impl PartialOrd for ArchiveTimestamp {
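The `ArchiveTarget` enum added above is what lets one helper serve both the installer and the cache: for an installed package the creation marker is the `METADATA` file under the installed distribution's path, while for a cached unzipped wheel it is the archive path itself. Below is a condensed, self-contained restatement of that dispatch, simplified for illustration: the real variant wraps `&InstalledDist` and returns uv's `Timestamp`, and the path in `main` is hypothetical.

```rust
use std::io;
use std::path::Path;
use std::time::SystemTime;

/// Simplified stand-in for the `ArchiveTarget` enum added above.
enum ArchiveTarget<'a> {
    /// An installed package, identified here by its `.dist-info` directory.
    Install(&'a Path),
    /// An unzipped wheel in the cache.
    Cache(&'a Path),
}

/// The creation marker compared against the source's modification time:
/// the `METADATA` file written at install time, or the cached archive itself.
fn created_at(target: &ArchiveTarget) -> io::Result<SystemTime> {
    let path = match target {
        ArchiveTarget::Install(dist_info) => dist_info.join("METADATA"),
        ArchiveTarget::Cache(cache) => cache.to_path_buf(),
    };
    std::fs::metadata(path)?.modified()
}

fn main() -> io::Result<()> {
    // Hypothetical location, purely for illustration.
    let target = ArchiveTarget::Install(Path::new(
        ".venv/lib/python3.12/site-packages/my_project-0.1.0.dist-info",
    ));
    println!("created at: {:?}", created_at(&target)?);
    Ok(())
}
```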
@@ -13,9 +13,9 @@ use distribution_types::{
 };
 use platform_tags::Tags;
 use pypi_types::Metadata21;
-use uv_cache::{Cache, CacheBucket, Timestamp, WheelCache};
+use uv_cache::{ArchiveTarget, ArchiveTimestamp, Cache, CacheBucket, WheelCache};
 use uv_client::{CacheControl, CachedClientError, Connectivity, RegistryClient};
-use uv_fs::metadata_if_exists;
 use uv_git::GitSource;
 use uv_traits::{BuildContext, NoBinary, NoBuild};

@@ -123,13 +123,12 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
             // return it.
             match cache_entry.path().canonicalize() {
                 Ok(archive) => {
-                    if let (Some(cache_metadata), Some(path_metadata)) = (
-                        metadata_if_exists(&archive).map_err(Error::CacheRead)?,
-                        metadata_if_exists(path).map_err(Error::CacheRead)?,
-                    ) {
-                        let cache_modified = Timestamp::from_metadata(&cache_metadata);
-                        let path_modified = Timestamp::from_metadata(&path_metadata);
-                        if cache_modified >= path_modified {
+                    if ArchiveTimestamp::up_to_date_with(
+                        path,
+                        ArchiveTarget::Cache(&archive),
+                    )
+                    .map_err(Error::CacheRead)?
+                    {
                         return Ok(LocalWheel::Unzipped(UnzippedWheel {
                             dist: dist.clone(),
                             archive,

@@ -137,7 +136,6 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
                         }));
                     }
                 }
-            }
             Err(err) if err.kind() == io::ErrorKind::NotFound => {}
             Err(err) => return Err(Error::CacheRead(err)),
         }

@@ -290,13 +288,12 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
             // return it.
             match cache_entry.path().canonicalize() {
                 Ok(archive) => {
-                    if let (Some(cache_metadata), Some(path_metadata)) = (
-                        metadata_if_exists(&archive).map_err(Error::CacheRead)?,
-                        metadata_if_exists(&wheel.path).map_err(Error::CacheRead)?,
-                    ) {
-                        let cache_modified = Timestamp::from_metadata(&cache_metadata);
-                        let path_modified = Timestamp::from_metadata(&path_metadata);
-                        if cache_modified >= path_modified {
+                    if ArchiveTimestamp::up_to_date_with(
+                        &wheel.path,
+                        ArchiveTarget::Cache(&archive),
+                    )
+                    .map_err(Error::CacheRead)?
+                    {
                         return Ok(LocalWheel::Unzipped(UnzippedWheel {
                             dist: dist.clone(),
                             archive,

@@ -304,7 +301,6 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
                         }));
                     }
                 }
-            }
             Err(err) if err.kind() == io::ErrorKind::NotFound => {}
             Err(err) => return Err(Error::CacheRead(err)),
         }
@@ -48,7 +48,8 @@ impl BuiltWheelIndex {
         );
 
         // Determine the last-modified time of the source distribution.
-        let Some(modified) = ArchiveTimestamp::from_path(&source_dist.path).expect("archived")
+        let Some(modified) =
+            ArchiveTimestamp::from_path(&source_dist.path).map_err(Error::CacheRead)?
         else {
             return Err(Error::DirWithoutEntrypoint);
         };
@@ -6,7 +6,7 @@ use distribution_types::{
 };
 use pypi_types::Metadata21;
 use requirements_txt::EditableRequirement;
-use uv_cache::ArchiveTimestamp;
 use uv_normalize::PackageName;
 
 /// An editable distribution that has been built.

@@ -69,18 +69,6 @@ impl std::fmt::Display for ResolvedEditable {
     }
 }
 
-/// Returns `true` if the installed distribution is up-to-date with the [`EditableRequirement`].
-pub fn not_modified(editable: &EditableRequirement, installed: &InstalledDist) -> bool {
-    let Ok(Some(installed_at)) = ArchiveTimestamp::from_path(installed.path().join("METADATA"))
-    else {
-        return false;
-    };
-    let Ok(Some(modified_at)) = ArchiveTimestamp::from_path(&editable.path) else {
-        return false;
-    };
-    installed_at > modified_at
-}
-
 /// Returns `true` if the [`EditableRequirement`] contains dynamic metadata.
 pub fn is_dynamic(editable: &EditableRequirement) -> bool {
     // If there's no `pyproject.toml`, we assume it's dynamic.
@@ -1,5 +1,5 @@
 pub use downloader::{Downloader, Reporter as DownloadReporter};
-pub use editable::{is_dynamic, not_modified, BuiltEditable, ResolvedEditable};
+pub use editable::{is_dynamic, BuiltEditable, ResolvedEditable};
 pub use installer::{Installer, Reporter as InstallReporter};
 pub use plan::{Plan, Planner, Reinstall};
 pub use site_packages::SitePackages;
@@ -1,19 +1,18 @@
 use std::collections::hash_map::Entry;
 use std::hash::BuildHasherDefault;
 use std::io;
-use std::path::Path;
 
 use anyhow::{bail, Result};
 use rustc_hash::FxHashMap;
 use tracing::{debug, warn};
 
 use distribution_types::{
-    BuiltDist, CachedDirectUrlDist, CachedDist, Dist, IndexLocations, InstalledDirectUrlDist,
-    InstalledDist, InstalledMetadata, InstalledVersion, Name, SourceDist,
+    BuiltDist, CachedDirectUrlDist, CachedDist, Dist, IndexLocations, InstalledDist,
+    InstalledMetadata, InstalledVersion, Name, SourceDist,
 };
 use pep508_rs::{Requirement, VersionOrUrl};
 use platform_tags::Tags;
-use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheEntry, Timestamp, WheelCache};
+use uv_cache::{ArchiveTarget, ArchiveTimestamp, Cache, CacheBucket, WheelCache};
 use uv_distribution::{BuiltWheelIndex, RegistryWheelIndex};
 use uv_fs::Simplified;
 use uv_interpreter::PythonEnvironment;

@@ -186,17 +185,20 @@ impl<'a> Planner<'a> {
 
                 // If the requirement comes from a direct URL, check by URL.
                 Some(VersionOrUrl::Url(url)) => {
-                    if let InstalledDist::Url(distribution) = &distribution {
-                        if &distribution.url == url.raw() {
+                    if let InstalledDist::Url(installed) = &distribution {
+                        if &installed.url == url.raw() {
                             // If the requirement came from a local path, check freshness.
                             if let Ok(archive) = url.to_file_path() {
-                                if not_modified_install(distribution, &archive)? {
-                                    debug!("Requirement already satisfied (and up-to-date): {distribution}");
+                                if ArchiveTimestamp::up_to_date_with(
+                                    &archive,
+                                    ArchiveTarget::Install(distribution),
+                                )? {
+                                    debug!("Requirement already satisfied (and up-to-date): {installed}");
                                     continue;
                                 }
                             } else {
                                 // Otherwise, assume the requirement is up-to-date.
-                                debug!("Requirement already satisfied (assumed up-to-date): {distribution}");
+                                debug!("Requirement already satisfied (assumed up-to-date): {installed}");
                                 continue;
                             }
                         }

@@ -320,9 +322,12 @@ impl<'a> Planner<'a> {
                         )
                         .entry(wheel.filename.stem());
 
-                    if not_modified_cache(&cache_entry, &wheel.path)? {
                     match cache_entry.path().canonicalize() {
                         Ok(archive) => {
+                            if ArchiveTimestamp::up_to_date_with(
+                                &wheel.path,
+                                ArchiveTarget::Cache(&archive),
+                            )? {
                                 let cached_dist = CachedDirectUrlDist::from_url(
                                     wheel.filename,
                                     wheel.url,

@@ -335,13 +340,13 @@ impl<'a> Planner<'a> {
                                 local.push(CachedDist::Url(cached_dist));
                                 continue;
                             }
+                        }
                         Err(err) if err.kind() == io::ErrorKind::NotFound => {
                             // The cache entry doesn't exist, so it's not fresh.
                         }
                         Err(err) => return Err(err.into()),
                     }
                 }
-            }
             Dist::Source(SourceDist::DirectUrl(sdist)) => {
                 // Find the most-compatible wheel from the cache, since we don't know
                 // the filename in advance.

@@ -418,44 +423,6 @@ enum Specifier<'a> {
     NonEditable(Option<&'a VersionOrUrl>),
 }
 
-/// Returns `true` if the cache entry linked to the file at the given [`Path`] is not-modified.
-///
-/// A cache entry is not modified if it exists and is newer than the file at the given path.
-fn not_modified_cache(cache_entry: &CacheEntry, artifact: &Path) -> Result<bool, io::Error> {
-    match fs_err::metadata(cache_entry.path()).map(|metadata| Timestamp::from_metadata(&metadata)) {
-        Ok(cache_timestamp) => {
-            // Determine the modification time of the wheel.
-            if let Some(artifact_timestamp) = ArchiveTimestamp::from_path(artifact)? {
-                Ok(cache_timestamp >= artifact_timestamp.timestamp())
-            } else {
-                // The artifact doesn't exist, so it's not fresh.
-                Ok(false)
-            }
-        }
-        Err(err) if err.kind() == io::ErrorKind::NotFound => {
-            // The cache entry doesn't exist, so it's not fresh.
-            Ok(false)
-        }
-        Err(err) => Err(err),
-    }
-}
-
-/// Returns `true` if the installed distribution linked to the file at the given [`Path`] is
-/// not-modified based on the modification time of the installed distribution.
-fn not_modified_install(dist: &InstalledDirectUrlDist, artifact: &Path) -> Result<bool, io::Error> {
-    // Determine the modification time of the installed distribution.
-    let dist_metadata = fs_err::metadata(dist.path.join("METADATA"))?;
-    let dist_timestamp = Timestamp::from_metadata(&dist_metadata);
-
-    // Determine the modification time of the wheel.
-    let Some(artifact_timestamp) = ArchiveTimestamp::from_path(artifact)? else {
-        // The artifact doesn't exist, so it's not fresh.
-        return Ok(false);
-    };
-
-    Ok(dist_timestamp >= artifact_timestamp.timestamp())
-}
-
 #[derive(Debug, Default)]
 pub struct Plan {
     /// The distributions that are not already installed in the current environment, but are
@@ -11,10 +11,11 @@ use distribution_types::{InstalledDist, InstalledMetadata, InstalledVersion, Nam
 use pep440_rs::{Version, VersionSpecifiers};
 use pep508_rs::{Requirement, VerbatimUrl};
 use requirements_txt::EditableRequirement;
+use uv_cache::{ArchiveTarget, ArchiveTimestamp};
 use uv_interpreter::PythonEnvironment;
 use uv_normalize::PackageName;
 
-use crate::{is_dynamic, not_modified};
+use crate::is_dynamic;
 
 /// An index over the packages installed in an environment.
 ///

@@ -276,7 +277,10 @@ impl<'a> SitePackages<'a> {
             }
             [distribution] => {
                 // Is the editable out-of-date?
-                if !not_modified(requirement, distribution) {
+                if !ArchiveTimestamp::up_to_date_with(
+                    &requirement.path,
+                    ArchiveTarget::Install(distribution),
+                )? {
                     return Ok(false);
                 }
 
@@ -11,13 +11,12 @@ use platform_host::Platform;
 use platform_tags::Tags;
 use pypi_types::Yanked;
 use requirements_txt::EditableRequirement;
-use uv_cache::Cache;
+use uv_cache::{ArchiveTarget, ArchiveTimestamp, Cache};
 use uv_client::{Connectivity, FlatIndex, FlatIndexClient, RegistryClient, RegistryClientBuilder};
 use uv_dispatch::BuildDispatch;
 use uv_fs::Simplified;
 use uv_installer::{
-    is_dynamic, not_modified, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable,
-    SitePackages,
+    is_dynamic, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages,
 };
 use uv_interpreter::PythonEnvironment;
 use uv_resolver::InMemoryIndex;

@@ -426,7 +425,11 @@ async fn resolve_editables(
         match existing.as_slice() {
             [] => uninstalled.push(editable),
             [dist] => {
-                if not_modified(&editable, dist) && !is_dynamic(&editable) {
+                if ArchiveTimestamp::up_to_date_with(
+                    &editable.path,
+                    ArchiveTarget::Install(dist),
+                )? && !is_dynamic(&editable)
+                {
                     installed.push((*dist).clone());
                 } else {
                     uninstalled.push(editable);

@@ -447,7 +450,11 @@ async fn resolve_editables(
             [dist] => {
                 if packages.contains(dist.name()) {
                     uninstalled.push(editable);
-                } else if not_modified(&editable, dist) && !is_dynamic(&editable) {
+                } else if ArchiveTimestamp::up_to_date_with(
+                    &editable.path,
+                    ArchiveTarget::Install(dist),
+                )? && !is_dynamic(&editable)
+                {
                     installed.push((*dist).clone());
                 } else {
                     uninstalled.push(editable);