Remove sync zip

This commit is contained in:
Charlie Marsh 2025-11-22 08:37:05 -06:00
parent 3bf79e2ada
commit 0b8c764d4e
19 changed files with 84 additions and 620 deletions

3
Cargo.lock generated
View File

@ -6071,7 +6071,6 @@ dependencies = [
"fs-err", "fs-err",
"futures", "futures",
"md-5", "md-5",
"rayon",
"regex", "regex",
"reqwest", "reqwest",
"rustc-hash", "rustc-hash",
@ -6081,12 +6080,10 @@ dependencies = [
"tokio", "tokio",
"tokio-util", "tokio-util",
"tracing", "tracing",
"uv-configuration",
"uv-distribution-filename", "uv-distribution-filename",
"uv-pypi-types", "uv-pypi-types",
"uv-static", "uv-static",
"xz2", "xz2",
"zip",
"zstd", "zstd",
] ]

View File

@ -5,19 +5,17 @@ use uv_pypi_types::{HashAlgorithm, HashDigest};
use uv_small_str::SmallString; use uv_small_str::SmallString;
/// The latest version of the archive bucket. /// The latest version of the archive bucket.
pub static LATEST: ArchiveVersion = ArchiveVersion::V1; pub static LATEST: ArchiveVersion = ArchiveVersion::V0;
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, serde::Serialize, serde::Deserialize)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, serde::Serialize, serde::Deserialize)]
pub enum ArchiveVersion { pub enum ArchiveVersion {
V0 = 0, V0 = 0,
V1 = 1,
} }
impl std::fmt::Display for ArchiveVersion { impl std::fmt::Display for ArchiveVersion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {
Self::V0 => write!(f, "0"), Self::V0 => write!(f, "0"),
Self::V1 => write!(f, "1"),
} }
} }
} }
@ -28,7 +26,6 @@ impl FromStr for ArchiveVersion {
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
match s { match s {
"0" => Ok(Self::V0), "0" => Ok(Self::V0),
"1" => Ok(Self::V1),
_ => Err(()), _ => Err(()),
} }
} }
@ -40,18 +37,17 @@ pub struct ArchiveId(SmallString);
impl ArchiveId { impl ArchiveId {
/// Return the content-addressed path for the [`ArchiveId`]. /// Return the content-addressed path for the [`ArchiveId`].
pub fn to_path_buf(&self, version: ArchiveVersion) -> PathBuf { pub fn to_path_buf(&self) -> PathBuf {
match version { if self.0.len() == 21 {
// Version 0: A 21-digit NanoID. // A 21-digit NanoID.
ArchiveVersion::V0 => PathBuf::from(self.0.as_ref()), PathBuf::from(self.0.as_ref())
// Version 1: A SHA256 hex digest, split into three segments. } else {
ArchiveVersion::V1 => { // A SHA256 hex digest, split into three segments.
let mut path = PathBuf::new(); let mut path = PathBuf::new();
path.push(&self.0[0..2]); path.push(&self.0[0..2]);
path.push(&self.0[2..4]); path.push(&self.0[2..4]);
path.push(&self.0[4..]); path.push(&self.0[4..]);
path path
}
} }
} }
} }

View File

@ -96,7 +96,7 @@ fn migrate_windows_cache(source: &Path, destination: &Path) -> Result<(), io::Er
"interpreter-v2", "interpreter-v2",
"simple-v12", "simple-v12",
"wheels-v1", "wheels-v1",
"archive-v1", "archive-v0",
"builds-v0", "builds-v0",
"environments-v1", "environments-v1",
] { ] {

View File

@ -262,11 +262,8 @@ impl Cache {
} }
/// Return the path to an archive in the cache. /// Return the path to an archive in the cache.
pub fn archive(&self, id: &ArchiveId, version: ArchiveVersion) -> PathBuf { pub fn archive(&self, id: &ArchiveId) -> PathBuf {
// TODO(charlie): Reuse `CacheBucket::Archive`. self.bucket(CacheBucket::Archive).join(id.to_path_buf())
self.root
.join(format!("archive-v{version}"))
.join(id.to_path_buf(version))
} }
/// Create a temporary directory to be used as a Python virtual environment. /// Create a temporary directory to be used as a Python virtual environment.
@ -355,9 +352,7 @@ impl Cache {
) -> io::Result<ArchiveId> { ) -> io::Result<ArchiveId> {
// Move the temporary directory into the directory store. // Move the temporary directory into the directory store.
let id = ArchiveId::from(hash); let id = ArchiveId::from(hash);
let archive_entry = self let archive_entry = self.bucket(CacheBucket::Archive).join(id.to_path_buf());
.bucket(CacheBucket::Archive)
.join(id.to_path_buf(LATEST));
if let Some(parent) = archive_entry.parent() { if let Some(parent) = archive_entry.parent() {
fs_err::create_dir_all(parent)?; fs_err::create_dir_all(parent)?;
} }
@ -744,7 +739,7 @@ impl Cache {
let link = Link::from_str(&contents)?; let link = Link::from_str(&contents)?;
// Ignore stale links. // Ignore stale links.
if link.version != ARCHIVE_VERSION { if link.version != LATEST {
return Err(io::Error::new( return Err(io::Error::new(
io::ErrorKind::NotFound, io::ErrorKind::NotFound,
"The link target does not exist.", "The link target does not exist.",
@ -763,7 +758,7 @@ impl Cache {
#[cfg(unix)] #[cfg(unix)]
pub fn create_link(&self, id: &ArchiveId, dst: impl AsRef<Path>) -> io::Result<()> { pub fn create_link(&self, id: &ArchiveId, dst: impl AsRef<Path>) -> io::Result<()> {
// Construct the link target. // Construct the link target.
let src = self.archive(id, ArchiveVersion::V1); let src = self.archive(id);
let dst = dst.as_ref(); let dst = dst.as_ref();
// Attempt to create the symlink directly. // Attempt to create the symlink directly.
@ -809,7 +804,7 @@ impl Link {
fn new(id: ArchiveId) -> Self { fn new(id: ArchiveId) -> Self {
Self { Self {
id, id,
version: ArchiveVersion::V1, version: ArchiveVersion::V0,
} }
} }
} }
@ -1130,7 +1125,7 @@ impl CacheBucket {
Self::Wheels => "wheels-v5", Self::Wheels => "wheels-v5",
// Note that when bumping this, you'll also need to bump // Note that when bumping this, you'll also need to bump
// `ARCHIVE_VERSION` in `crates/uv-cache/src/lib.rs`. // `ARCHIVE_VERSION` in `crates/uv-cache/src/lib.rs`.
Self::Archive => "archive-v1", Self::Archive => "archive-v0",
Self::Builds => "builds-v0", Self::Builds => "builds-v0",
Self::Environments => "environments-v2", Self::Environments => "environments-v2",
Self::Python => "python-v0", Self::Python => "python-v0",
@ -1394,10 +1389,8 @@ mod tests {
#[test] #[test]
fn test_link_deserialize() { fn test_link_deserialize() {
assert!(Link::from_str("archive-v1/foo").is_ok()); assert!(Link::from_str("archive-v0/foo").is_ok());
assert!(Link::from_str("archive/foo").is_err()); assert!(Link::from_str("archive/foo").is_err());
assert!(Link::from_str("v1/foo").is_err()); assert!(Link::from_str("v1/foo").is_err());
assert!(Link::from_str("archive-v1/").is_err());
assert!(Link::from_str("archive-v0/foo").is_ok());
} }
} }

View File

@ -1,4 +1,4 @@
use uv_cache::{ArchiveId, ArchiveVersion, Cache, LATEST}; use uv_cache::{ArchiveId, ArchiveVersion, LATEST};
use uv_distribution_filename::WheelFilename; use uv_distribution_filename::WheelFilename;
use uv_distribution_types::Hashed; use uv_distribution_types::Hashed;
use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests}; use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests};
@ -34,11 +34,6 @@ impl Archive {
version: LATEST, version: LATEST,
} }
} }
/// Returns `true` if the archive exists in the cache.
pub(crate) fn exists(&self, cache: &Cache) -> bool {
cache.archive(&self.id, self.version).exists()
}
} }
impl Hashed for Archive { impl Hashed for Archive {

View File

@ -227,7 +227,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: wheel.filename.clone(), filename: wheel.filename.clone(),
@ -265,7 +265,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: wheel.filename.clone(), filename: wheel.filename.clone(),
@ -304,7 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: wheel.filename.clone(), filename: wheel.filename.clone(),
@ -335,7 +335,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: wheel.filename.clone(), filename: wheel.filename.clone(),
@ -421,11 +421,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
Ok(LocalWheel { Ok(LocalWheel {
dist: Dist::Source(dist.clone()), dist: Dist::Source(dist.clone()),
archive: self archive: self.build_context.cache().archive(&id).into_boxed_path(),
.build_context
.cache()
.archive(&id, LATEST)
.into_boxed_path(),
hashes: built_wheel.hashes, hashes: built_wheel.hashes,
filename: built_wheel.filename, filename: built_wheel.filename,
cache: built_wheel.cache_info, cache: built_wheel.cache_info,
@ -689,7 +685,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
Connectivity::Offline => CacheControl::AllowStale, Connectivity::Offline => CacheControl::AllowStale,
}; };
let archive = self let archive: Archive = self
.client .client
.managed(|client| { .managed(|client| {
client.cached_client().get_serde_with_retry( client.cached_client().get_serde_with_retry(
@ -708,7 +704,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// If the archive is missing the required hashes, or has since been removed, force a refresh. // If the archive is missing the required hashes, or has since been removed, force a refresh.
let archive = Some(archive) let archive = Some(archive)
.filter(|archive| archive.has_digests(hashes)) .filter(|archive| archive.has_digests(hashes))
.filter(|archive| archive.exists(self.build_context.cache())); .filter(|archive| archive.version == LATEST)
.filter(|archive| self.build_context.cache().archive(&archive.id).exists());
let archive = if let Some(archive) = archive { let archive = if let Some(archive) = archive {
archive archive
@ -875,7 +872,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
Connectivity::Offline => CacheControl::AllowStale, Connectivity::Offline => CacheControl::AllowStale,
}; };
let archive = self let archive: Archive = self
.client .client
.managed(|client| { .managed(|client| {
client.cached_client().get_serde_with_retry( client.cached_client().get_serde_with_retry(
@ -894,7 +891,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// If the archive is missing the required hashes, or has since been removed, force a refresh. // If the archive is missing the required hashes, or has since been removed, force a refresh.
let archive = Some(archive) let archive = Some(archive)
.filter(|archive| archive.has_digests(hashes)) .filter(|archive| archive.has_digests(hashes))
.filter(|archive| archive.exists(self.build_context.cache())); .filter(|archive| archive.version == LATEST)
.filter(|archive| self.build_context.cache().archive(&archive.id).exists());
let archive = if let Some(archive) = archive { let archive = if let Some(archive) = archive {
archive archive
@ -957,7 +955,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: filename.clone(), filename: filename.clone(),
@ -1024,7 +1022,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self archive: self
.build_context .build_context
.cache() .cache()
.archive(&archive.id, archive.version) .archive(&archive.id)
.into_boxed_path(), .into_boxed_path(),
hashes: archive.hashes, hashes: archive.hashes,
filename: filename.clone(), filename: filename.clone(),

View File

@ -1,6 +1,6 @@
use std::path::Path; use std::path::Path;
use uv_cache::{Cache, CacheEntry}; use uv_cache::{Cache, CacheEntry, LATEST};
use uv_cache_info::CacheInfo; use uv_cache_info::CacheInfo;
use uv_distribution_filename::WheelFilename; use uv_distribution_filename::WheelFilename;
use uv_distribution_types::{ use uv_distribution_types::{
@ -82,9 +82,14 @@ impl CachedWheel {
hashes, hashes,
.. ..
} = archive; } = archive;
let path = cache.archive(&id, version);
// Ignore out-of-date versions.
if version != LATEST {
return None;
}
// Ignore stale pointers. // Ignore stale pointers.
let path = cache.archive(&id);
if !path.exists() { if !path.exists() {
return None; return None;
} }
@ -114,9 +119,14 @@ impl CachedWheel {
hashes, hashes,
.. ..
} = archive; } = archive;
let path = cache.archive(&id, version);
// Ignore out-of-date versions.
if version != LATEST {
return None;
}
// Ignore stale pointers. // Ignore stale pointers.
let path = cache.archive(&id);
if !path.exists() { if !path.exists() {
return None; return None;
} }

View File

@ -16,7 +16,6 @@ doctest = false
workspace = true workspace = true
[dependencies] [dependencies]
uv-configuration = { workspace = true }
uv-distribution-filename = { workspace = true } uv-distribution-filename = { workspace = true }
uv-pypi-types = { workspace = true } uv-pypi-types = { workspace = true }
uv-static = { workspace = true } uv-static = { workspace = true }
@ -28,7 +27,6 @@ blake2 = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] } fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true } futures = { workspace = true }
md-5 = { workspace = true } md-5 = { workspace = true }
rayon = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
reqwest = { workspace = true } reqwest = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
@ -39,7 +37,6 @@ tokio = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] } tokio-util = { workspace = true, features = ["compat"] }
tracing = { workspace = true } tracing = { workspace = true }
xz2 = { workspace = true } xz2 = { workspace = true }
zip = { workspace = true }
zstd = { workspace = true } zstd = { workspace = true }
[features] [features]

View File

@ -4,8 +4,6 @@ use std::{ffi::OsString, path::PathBuf};
pub enum Error { pub enum Error {
#[error("I/O operation failed during extraction")] #[error("I/O operation failed during extraction")]
Io(#[source] std::io::Error), Io(#[source] std::io::Error),
#[error("Invalid zip file")]
Zip(#[from] zip::result::ZipError),
#[error("Invalid zip file structure")] #[error("Invalid zip file structure")]
AsyncZip(#[from] async_zip::error::ZipError), AsyncZip(#[from] async_zip::error::ZipError),
#[error("Invalid tar file")] #[error("Invalid tar file")]
@ -113,10 +111,6 @@ impl Error {
Ok(zip_err) => return Self::AsyncZip(zip_err), Ok(zip_err) => return Self::AsyncZip(zip_err),
Err(err) => err, Err(err) => err,
}; };
let err = match err.downcast::<zip::result::ZipError>() {
Ok(zip_err) => return Self::Zip(zip_err),
Err(err) => err,
};
Self::Io(err) Self::Io(err)
} }

View File

@ -1,19 +1,42 @@
use std::path::{Path, PathBuf};
use std::sync::LazyLock; use std::sync::LazyLock;
pub use error::Error; pub use error::Error;
use regex::Regex; use regex::Regex;
pub use sync::*;
use uv_static::EnvVars; use uv_static::EnvVars;
mod error; mod error;
pub mod hash; pub mod hash;
pub mod stream; pub mod stream;
mod sync;
mod vendor;
static CONTROL_CHARACTERS_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\p{C}").unwrap()); static CONTROL_CHARACTERS_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\p{C}").unwrap());
static REPLACEMENT_CHARACTER: &str = "\u{FFFD}"; static REPLACEMENT_CHARACTER: &str = "\u{FFFD}";
/// Extract the top-level directory from an unpacked archive.
///
/// The specification says:
/// > A .tar.gz source distribution (sdist) contains a single top-level directory called
/// > `{name}-{version}` (e.g. foo-1.0), containing the source files of the package.
///
/// This function returns the path to that top-level directory.
pub fn strip_component(source: impl AsRef<Path>) -> Result<PathBuf, Error> {
// TODO(konstin): Verify the name of the directory.
let top_level = fs_err::read_dir(source.as_ref())
.map_err(Error::Io)?
.collect::<std::io::Result<Vec<fs_err::DirEntry>>>()
.map_err(Error::Io)?;
match top_level.as_slice() {
[root] => Ok(root.path()),
[] => Err(Error::EmptyArchive),
_ => Err(Error::NonSingularArchive(
top_level
.into_iter()
.map(|entry| entry.file_name())
.collect(),
)),
}
}
/// Validate that a given filename (e.g. reported by a ZIP archive's /// Validate that a given filename (e.g. reported by a ZIP archive's
/// local file entries or central directory entries) is "safe" to use. /// local file entries or central directory entries) is "safe" to use.
/// ///

View File

@ -1,122 +0,0 @@
use std::path::{Path, PathBuf};
use std::sync::{LazyLock, Mutex};
use crate::vendor::{CloneableSeekableReader, HasLength};
use crate::{Error, insecure_no_validate, validate_archive_member_name};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use tracing::warn;
use uv_configuration::RAYON_INITIALIZE;
use zip::ZipArchive;
/// Unzip a `.zip` archive into the target directory.
///
/// Extraction is parallelized across archive members via `rayon`; each task
/// clones the [`CloneableSeekableReader`] so that it holds an independent
/// cursor into the shared underlying stream.
///
/// # Errors
///
/// Returns an [`Error`] if the archive is malformed, if a member name fails
/// validation (unless validation is disabled), or on any I/O failure while
/// creating directories, writing files, or setting permissions.
pub fn unzip<R: Send + std::io::Read + std::io::Seek + HasLength>(
    reader: R,
    target: &Path,
) -> Result<(), Error> {
    // Unzip in parallel.
    let reader = std::io::BufReader::new(reader);
    let archive = ZipArchive::new(CloneableSeekableReader::new(reader))?;
    // Tracks directories already created, so concurrent tasks don't issue
    // redundant `create_dir_all` calls for shared parents.
    let directories = Mutex::new(FxHashSet::default());
    // Resolve the escape hatch once, outside the parallel loop.
    let skip_validation = insecure_no_validate();

    // Initialize the threadpool with the user settings.
    LazyLock::force(&RAYON_INITIALIZE);
    (0..archive.len())
        .into_par_iter()
        .map(|file_number| {
            // Each task needs its own handle: `by_index` takes `&mut self`.
            let mut archive = archive.clone();
            let mut file = archive.by_index(file_number)?;

            // Reject suspicious member names (e.g., control characters),
            // unless the user explicitly opted out of validation.
            if let Err(e) = validate_archive_member_name(file.name()) {
                if !skip_validation {
                    return Err(e);
                }
            }

            // Determine the path of the file within the wheel.
            // `enclosed_name` returns `None` for path-traversal attempts,
            // which are skipped (with a warning) rather than failing.
            let Some(enclosed_name) = file.enclosed_name() else {
                warn!("Skipping unsafe file name: {}", file.name());
                return Ok(());
            };

            // Create necessary parent directories.
            let path = target.join(enclosed_name);
            if file.is_dir() {
                let mut directories = directories.lock().unwrap();
                if directories.insert(path.clone()) {
                    fs_err::create_dir_all(path).map_err(Error::Io)?;
                }
                return Ok(());
            }
            if let Some(parent) = path.parent() {
                let mut directories = directories.lock().unwrap();
                if directories.insert(parent.to_path_buf()) {
                    fs_err::create_dir_all(parent).map_err(Error::Io)?;
                }
            }

            // Copy the file contents, buffering writes (capped at 1 MiB) when
            // the member size fits in `usize`.
            let outfile = fs_err::File::create(&path).map_err(Error::Io)?;
            let size = file.size();
            if size > 0 {
                let mut writer = if let Ok(size) = usize::try_from(size) {
                    std::io::BufWriter::with_capacity(std::cmp::min(size, 1024 * 1024), outfile)
                } else {
                    std::io::BufWriter::new(outfile)
                };
                std::io::copy(&mut file, &mut writer).map_err(Error::io_or_compression)?;
            }

            // See `uv_extract::stream::unzip`. For simplicity, this is identical with the code there except for being
            // sync.
            #[cfg(unix)]
            {
                use std::fs::Permissions;
                use std::os::unix::fs::PermissionsExt;

                if let Some(mode) = file.unix_mode() {
                    // If the zip entry carries any executable bit, ensure all
                    // three executable bits are set on the extracted file:
                    // https://github.com/pypa/pip/blob/3898741e29b7279e7bffe044ecfbe20f6a438b1e/src/pip/_internal/utils/unpacking.py#L88-L100
                    let has_any_executable_bit = mode & 0o111;
                    if has_any_executable_bit != 0 {
                        let permissions = fs_err::metadata(&path).map_err(Error::Io)?.permissions();
                        if permissions.mode() & 0o111 != 0o111 {
                            fs_err::set_permissions(
                                &path,
                                Permissions::from_mode(permissions.mode() | 0o111),
                            )
                            .map_err(Error::Io)?;
                        }
                    }
                }
            }

            Ok(())
        })
        .collect::<Result<_, Error>>()
}
/// Extract the top-level directory from an unpacked archive.
///
/// The specification says:
/// > A .tar.gz source distribution (sdist) contains a single top-level directory called
/// > `{name}-{version}` (e.g. foo-1.0), containing the source files of the package.
///
/// This function returns the path to that top-level directory.
pub fn strip_component(source: impl AsRef<Path>) -> Result<PathBuf, Error> {
// TODO(konstin): Verify the name of the directory.
let top_level = fs_err::read_dir(source.as_ref())
.map_err(Error::Io)?
.collect::<std::io::Result<Vec<fs_err::DirEntry>>>()
.map_err(Error::Io)?;
match top_level.as_slice() {
[root] => Ok(root.path()),
[] => Err(Error::EmptyArchive),
_ => Err(Error::NonSingularArchive(
top_level
.into_iter()
.map(|entry| entry.file_name())
.collect(),
)),
}
}

View File

@ -1,236 +0,0 @@
This software is distributed under the terms of both the MIT license and the
Apache License (Version 2.0).
MIT license
Copyright 2022 Google LLC
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Apache 2 license
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,174 +0,0 @@
// Copyright 2022 Google LLC
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(clippy::cast_sign_loss)]
use std::{
io::{BufReader, Cursor, Read, Seek, SeekFrom},
sync::{Arc, Mutex},
};
/// A trait to represent some reader which has a total length known in
/// advance. This is roughly equivalent to the nightly
/// [`Seek::stream_len`] API.
// `len` without `is_empty` is deliberate: this mirrors `Seek::stream_len`,
// and emptiness is not a useful query for a stream length.
#[allow(clippy::len_without_is_empty)]
pub trait HasLength {
    /// Return the current total length of this stream, in bytes.
    fn len(&self) -> u64;
}
/// A [`Read`] which refers to its underlying stream by reference count,
/// and thus can be cloned cheaply. It supports seeking; each cloned instance
/// maintains its own pointer into the file, and the underlying instance
/// is seeked prior to each read.
pub(crate) struct CloneableSeekableReader<R: Read + Seek + HasLength> {
    /// The shared underlying stream; the mutex serializes the
    /// seek-then-read pairs performed by each clone.
    file: Arc<Mutex<R>>,
    /// This clone's private read position within the shared stream.
    pos: u64,
    // TODO determine and store this once instead of per cloneable file
    /// Cached total length of the stream; `None` until first queried.
    file_length: Option<u64>,
}
impl<R: Read + Seek + HasLength> Clone for CloneableSeekableReader<R> {
    /// Produce a cheap clone that shares the underlying stream but
    /// carries its own independent read position.
    fn clone(&self) -> Self {
        Self {
            // Bump the reference count; the stream itself is not copied.
            file: Arc::clone(&self.file),
            pos: self.pos,
            file_length: self.file_length,
        }
    }
}
impl<R: Read + Seek + HasLength> CloneableSeekableReader<R> {
    /// Constructor. Takes ownership of the underlying `Read`.
    /// You should pass in only streams whose total length you expect
    /// to be fixed and unchanging. Odd behavior may occur if the length
    /// of the stream changes; any subsequent seeks will not take account
    /// of the changed stream length.
    pub(crate) fn new(file: R) -> Self {
        Self {
            file: Arc::new(Mutex::new(file)),
            pos: 0,
            file_length: None,
        }
    }

    /// Determine the total length of the underlying stream, caching the
    /// answer so the lock is taken at most once for this purpose.
    fn ascertain_file_length(&mut self) -> u64 {
        match self.file_length {
            Some(len) => len,
            None => {
                let len = self.file.lock().unwrap().len();
                self.file_length = Some(len);
                len
            }
        }
    }
}
impl<R: Read + Seek + HasLength> Read for CloneableSeekableReader<R> {
    /// Read from the shared stream at this clone's own position.
    ///
    /// The underlying stream is repositioned to `self.pos` before every
    /// read, so independently-cloned readers do not disturb one another.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let mut underlying = self.file.lock().expect("Unable to get underlying file");
        // TODO share an object which knows current position to avoid unnecessary
        // seeks
        underlying.seek(SeekFrom::Start(self.pos))?;
        let bytes_read = underlying.read(buf)?;
        // Advance only on a successful read; `?` above leaves `pos`
        // untouched on error.
        self.pos += bytes_read as u64;
        Ok(bytes_read)
    }
}
impl<R: Read + Seek + HasLength> Seek for CloneableSeekableReader<R> {
    /// Seek this clone's private position; the shared stream itself is
    /// only repositioned on the next `read`.
    ///
    /// Offsets are resolved with `u64::checked_add_signed` (stable since
    /// Rust 1.66, fulfilling the old TODOs), so a position before the
    /// start of the stream — or past `u64::MAX` — yields an error
    /// instead of wrapping or panicking. Seeking beyond the end of the
    /// stream is allowed, per the [`Seek`] contract.
    ///
    /// # Errors
    ///
    /// Returns [`std::io::ErrorKind::InvalidInput`] if the resolved
    /// position would be negative or would overflow `u64`.
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        let new_pos = match pos {
            SeekFrom::Start(pos) => Some(pos),
            SeekFrom::End(offset_from_end) => self
                .ascertain_file_length()
                .checked_add_signed(offset_from_end),
            SeekFrom::Current(offset_from_pos) => self.pos.checked_add_signed(offset_from_pos),
        };
        let new_pos = new_pos.ok_or_else(|| {
            std::io::Error::new(std::io::ErrorKind::InvalidInput, "Seek too far backwards")
        })?;
        self.pos = new_pos;
        Ok(new_pos)
    }
}
/// A buffered reader is exactly as long as the stream it wraps.
impl<R: HasLength> HasLength for BufReader<R> {
    fn len(&self) -> u64 {
        self.get_ref().len()
    }
}

#[allow(clippy::disallowed_types)]
impl HasLength for std::fs::File {
    fn len(&self) -> u64 {
        // NOTE(review): panics if the metadata query fails (e.g. the
        // file was removed out from under us) — confirm acceptable.
        self.metadata().unwrap().len()
    }
}

impl HasLength for fs_err::File {
    fn len(&self) -> u64 {
        // NOTE(review): same panic-on-metadata-failure caveat as the
        // `std::fs::File` impl.
        self.metadata().unwrap().len()
    }
}

/// An in-memory cursor's length is the length of its backing buffer.
impl HasLength for Cursor<Vec<u8>> {
    fn len(&self) -> u64 {
        self.get_ref().len() as u64
    }
}

impl HasLength for Cursor<&Vec<u8>> {
    fn len(&self) -> u64 {
        self.get_ref().len() as u64
    }
}
#[cfg(test)]
mod test {
    use std::io::{Cursor, Read, Seek, SeekFrom};

    use super::CloneableSeekableReader;

    /// Exercise reading, rewinding, position queries, and end-relative
    /// seeking against a ten-byte in-memory stream.
    #[test]
    fn test_cloneable_seekable_reader() {
        // Each byte's value equals its offset, so reads are easy to check.
        let data: Vec<u8> = (0u8..10).collect();
        let mut reader = CloneableSeekableReader::new(Cursor::new(data));
        let mut chunk = vec![0u8; 2];

        // Reading begins at offset zero.
        assert!(reader.read_exact(&mut chunk).is_ok());
        assert_eq!(chunk, [0, 1]);

        // Rewinding and re-reading yields the same bytes.
        assert!(reader.seek(SeekFrom::Start(0)).is_ok());
        assert!(reader.read_exact(&mut chunk).is_ok());
        assert_eq!(chunk, [0, 1]);

        // Querying the position does not disturb it.
        assert!(reader.stream_position().is_ok());
        assert!(reader.read_exact(&mut chunk).is_ok());
        assert_eq!(chunk, [2, 3]);

        // Seeking relative to the end lands on the final two bytes.
        assert!(reader.seek(SeekFrom::End(-2)).is_ok());
        assert!(reader.read_exact(&mut chunk).is_ok());
        assert_eq!(chunk, [8, 9]);

        // Reading past the end of the stream fails.
        assert!(reader.read_exact(&mut chunk).is_err());
    }
}

View File

@ -1,3 +0,0 @@
pub(crate) use cloneable_seekable_reader::{CloneableSeekableReader, HasLength};
mod cloneable_seekable_reader;

View File

@ -263,9 +263,7 @@ impl<'a> Planner<'a> {
hashes: archive.hashes, hashes: archive.hashes,
cache_info, cache_info,
build_info, build_info,
path: cache path: cache.archive(&archive.id).into_boxed_path(),
.archive(&archive.id, archive.version)
.into_boxed_path(),
}; };
debug!("URL wheel requirement already cached: {cached_dist}"); debug!("URL wheel requirement already cached: {cached_dist}");
@ -340,9 +338,7 @@ impl<'a> Planner<'a> {
hashes: archive.hashes, hashes: archive.hashes,
cache_info, cache_info,
build_info, build_info,
path: cache path: cache.archive(&archive.id).into_boxed_path(),
.archive(&archive.id, archive.version)
.into_boxed_path(),
}; };
debug!( debug!(

View File

@ -2,7 +2,7 @@ use std::path::Path;
use tracing::debug; use tracing::debug;
use uv_cache::{Cache, CacheBucket, LATEST}; use uv_cache::{Cache, CacheBucket};
use uv_cache_key::{cache_digest, hash_digest}; use uv_cache_key::{cache_digest, hash_digest};
use uv_client::BaseClientBuilder; use uv_client::BaseClientBuilder;
use uv_configuration::{Concurrency, Constraints, TargetTriple}; use uv_configuration::{Concurrency, Constraints, TargetTriple};
@ -225,7 +225,7 @@ impl CachedEnvironment {
let id = cache let id = cache
.persist(temp_dir.keep(), cache_entry.path(), sha256) .persist(temp_dir.keep(), cache_entry.path(), sha256)
.await?; .await?;
let root = cache.archive(&id, LATEST); let root = cache.archive(&id);
Ok(Self(PythonEnvironment::from_root(root, cache)?)) Ok(Self(PythonEnvironment::from_root(root, cache)?))
} }

View File

@ -136,7 +136,7 @@ fn clean_package_pypi() -> Result<()> {
----- stderr ----- ----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE) DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired lock for `[CACHE_DIR]/` DEBUG Acquired lock for `[CACHE_DIR]/`
DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v1/[ENTRY] DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE]) Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock` DEBUG Released lock at `[CACHE_DIR]/.lock`
"); ");
@ -215,7 +215,7 @@ fn clean_package_index() -> Result<()> {
----- stderr ----- ----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE) DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired lock for `[CACHE_DIR]/` DEBUG Acquired lock for `[CACHE_DIR]/`
DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v1/[ENTRY] DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE]) Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock` DEBUG Released lock at `[CACHE_DIR]/.lock`
"); ");

View File

@ -142,7 +142,7 @@ fn prune_cached_env() {
DEBUG Acquired lock for `[CACHE_DIR]/` DEBUG Acquired lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/ Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache environment: [CACHE_DIR]/environments-v2/[ENTRY] DEBUG Removing dangling cache environment: [CACHE_DIR]/environments-v2/[ENTRY]
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v1/[ENTRY] DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE]) Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock` DEBUG Released lock at `[CACHE_DIR]/.lock`
"); ");
@ -188,7 +188,7 @@ fn prune_stale_symlink() -> Result<()> {
DEBUG uv [VERSION] ([COMMIT] DATE) DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired lock for `[CACHE_DIR]/` DEBUG Acquired lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/ Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v1/[ENTRY] DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed 44 files ([SIZE]) Removed 44 files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock` DEBUG Released lock at `[CACHE_DIR]/.lock`
"); ");
@ -409,7 +409,7 @@ fn prune_stale_revision() -> Result<()> {
DEBUG Acquired lock for `[CACHE_DIR]/` DEBUG Acquired lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/ Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling source revision: [CACHE_DIR]/sdists-v9/[ENTRY] DEBUG Removing dangling source revision: [CACHE_DIR]/sdists-v9/[ENTRY]
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v1/[ENTRY] DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE]) Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock` DEBUG Released lock at `[CACHE_DIR]/.lock`
"); ");

View File

@ -246,7 +246,7 @@ fn find_uv_bin_in_ephemeral_environment() -> anyhow::Result<()> {
success: true success: true
exit_code: 0 exit_code: 0
----- stdout ----- ----- stdout -----
[CACHE_DIR]/archive-v1/[HASH]/[BIN]/uv [CACHE_DIR]/archive-v0/[HASH]/[BIN]/uv
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]