mirror of https://github.com/astral-sh/uv
improve archive error messages (#12627)
This commit is contained in:
parent 9eaa9f789b
commit 992f54ec17
@@ -225,7 +225,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 filename: wheel.filename.clone(),
                 cache: CacheInfo::default(),
             }),
-            Err(Error::Extract(err)) => {
+            Err(Error::Extract(name, err)) => {
                 if err.is_http_streaming_unsupported() {
                     warn!(
                         "Streaming unsupported for {dist}; downloading wheel to disk ({err})"
@@ -233,7 +233,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 } else if err.is_http_streaming_failed() {
                     warn!("Streaming failed for {dist}; downloading wheel to disk ({err})");
                 } else {
-                    return Err(Error::Extract(err));
+                    return Err(Error::Extract(name, err));
                 }

                 // If the request failed because streaming is unsupported, download the
@@ -570,10 +570,14 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         match progress {
             Some((reporter, progress)) => {
                 let mut reader = ProgressReader::new(&mut hasher, progress, &**reporter);
-                uv_extract::stream::unzip(&mut reader, temp_dir.path()).await?;
+                uv_extract::stream::unzip(&mut reader, temp_dir.path())
+                    .await
+                    .map_err(|err| Error::Extract(filename.to_string(), err))?;
             }
             None => {
-                uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
+                uv_extract::stream::unzip(&mut hasher, temp_dir.path())
+                    .await
+                    .map_err(|err| Error::Extract(filename.to_string(), err))?;
             }
         }

@@ -734,7 +738,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 Ok(())
             }
         })
-        .await??;
+        .await?
+        .map_err(|err| Error::Extract(filename.to_string(), err))?;

         HashDigests::empty()
     } else {
@@ -742,7 +747,9 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         let algorithms = hashes.algorithms();
         let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
         let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
-        uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
+        uv_extract::stream::unzip(&mut hasher, temp_dir.path())
+            .await
+            .map_err(|err| Error::Extract(filename.to_string(), err))?;

         // If necessary, exhaust the reader to compute the hash.
         hasher.finish().await.map_err(Error::HashExhaustion)?;
@@ -902,7 +909,9 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);

         // Unzip the wheel to a temporary directory.
-        uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
+        uv_extract::stream::unzip(&mut hasher, temp_dir.path())
+            .await
+            .map_err(|err| Error::Extract(filename.to_string(), err))?;

         // Exhaust the reader to compute the hash.
         hasher.finish().await.map_err(Error::HashExhaustion)?;
@@ -949,8 +958,9 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             move || -> Result<TempDir, Error> {
                 // Unzip the wheel into a temporary directory.
                 let temp_dir = tempfile::tempdir_in(root).map_err(Error::CacheWrite)?;
-                let reader = fs_err::File::open(path).map_err(Error::CacheWrite)?;
-                uv_extract::unzip(reader, temp_dir.path())?;
+                let reader = fs_err::File::open(&path).map_err(Error::CacheWrite)?;
+                uv_extract::unzip(reader, temp_dir.path())
+                    .map_err(|err| Error::Extract(path.to_string_lossy().into_owned(), err))?;
                 Ok(temp_dir)
             }
         })
@@ -84,8 +84,8 @@ pub enum Error {
     ReadInstalled(Box<InstalledDist>, #[source] InstalledDistError),
     #[error("Failed to read zip archive from built wheel")]
     Zip(#[from] ZipError),
-    #[error("Failed to extract archive")]
-    Extract(#[from] uv_extract::Error),
+    #[error("Failed to extract archive: {0}")]
+    Extract(String, #[source] uv_extract::Error),
     #[error("The source distribution is missing a `PKG-INFO` file")]
     MissingPkgInfo,
     #[error("The source distribution `{}` has no subdirectory `{}`", _0, _1.display())]
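The hunk above is the heart of the change: the `Extract` variant now carries the archive name as a `String` alongside the underlying `uv_extract::Error`, so the top-level message can say which archive failed while `#[source]` keeps the cause chained. Dropping `#[from]` means `?` no longer converts automatically, which is why every call site in the other hunks gains a `.map_err(...)`. A minimal, self-contained sketch of the pattern (hypothetical `ExtractError`, `unzip`, and `install` stand-ins, not the uv types; assumes the `thiserror` crate):

    use thiserror::Error;

    // Hypothetical stand-in for `uv_extract::Error`.
    #[derive(Debug, Error)]
    enum ExtractError {
        #[error("Bad CRC (got {computed:08x}, expected {expected:08x}) for file: {path}")]
        BadCrc32 { path: String, computed: u32, expected: u32 },
    }

    #[derive(Debug, Error)]
    enum Error {
        // Before: `Extract(#[from] ExtractError)` -- `?` converted automatically,
        // but the message could not say which archive failed.
        // After: the variant also carries the archive name; `{0}` puts it in the
        // top-level message and `#[source]` keeps the cause chained underneath.
        #[error("Failed to extract archive: {0}")]
        Extract(String, #[source] ExtractError),
    }

    // Hypothetical extraction step that fails with a CRC mismatch.
    fn unzip(_archive: &str) -> Result<(), ExtractError> {
        Err(ExtractError::BadCrc32 {
            path: "pkg/lib.so".to_string(),
            computed: 0xca5f_1131,
            expected: 0xd5c9_5dfa,
        })
    }

    fn install(archive: &str) -> Result<(), Error> {
        // Without `#[from]`, `?` alone no longer compiles; each call site
        // attaches the archive name explicitly, as the hunks above do.
        unzip(archive).map_err(|err| Error::Extract(archive.to_string(), err))?;
        Ok(())
    }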
@@ -2089,7 +2089,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         // Download and unzip the source distribution into a temporary directory.
         let span = info_span!("download_source_dist", source_dist = %source);
-        uv_extract::stream::archive(&mut hasher, ext, temp_dir.path()).await?;
+        uv_extract::stream::archive(&mut hasher, ext, temp_dir.path())
+            .await
+            .map_err(|err| Error::Extract(source.to_string(), err))?;
         drop(span);

         // If necessary, exhaust the reader to compute the hash.
@@ -2103,7 +2105,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let extracted = match uv_extract::strip_component(temp_dir.path()) {
             Ok(top_level) => top_level,
             Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.into_path(),
-            Err(err) => return Err(err.into()),
+            Err(err) => {
+                return Err(Error::Extract(
+                    temp_dir.path().to_string_lossy().into_owned(),
+                    err,
+                ))
+            }
         };

         // Persist it to the cache.
@@ -2151,7 +2158,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let mut hasher = uv_extract::hash::HashReader::new(reader, &mut hashers);

         // Unzip the archive into a temporary directory.
-        uv_extract::stream::archive(&mut hasher, ext, &temp_dir.path()).await?;
+        uv_extract::stream::archive(&mut hasher, ext, &temp_dir.path())
+            .await
+            .map_err(|err| Error::Extract(temp_dir.path().to_string_lossy().into_owned(), err))?;

         // If necessary, exhaust the reader to compute the hash.
         if !algorithms.is_empty() {
@@ -2164,7 +2173,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let extracted = match uv_extract::strip_component(temp_dir.path()) {
             Ok(top_level) => top_level,
             Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.path().to_path_buf(),
-            Err(err) => return Err(err.into()),
+            Err(err) => {
+                return Err(Error::Extract(
+                    temp_dir.path().to_string_lossy().into_owned(),
+                    err,
+                ))
+            }
         };

         // Persist it to the cache.
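In the source-distribution hunks there is no wheel filename to report, so the extraction directory is used instead, converted from a `Path` to the `String` the variant stores via `to_string_lossy().into_owned()`. A small illustration of that conversion (plain std, nothing uv-specific; the directory name is made up):

    use std::borrow::Cow;
    use std::path::Path;

    fn main() {
        let dir = Path::new("/tmp/.tmpAbc123");

        // `to_string_lossy` borrows when the path is valid UTF-8 and only
        // allocates (replacing invalid bytes with U+FFFD) when it is not.
        let lossy: Cow<'_, str> = dir.to_string_lossy();

        // `into_owned` yields the owned `String` that `Error::Extract(String, ...)` stores.
        let owned: String = lossy.into_owned();
        assert_eq!(owned, "/tmp/.tmpAbc123");
    }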
@@ -14,7 +14,7 @@ pub enum Error {
     NonSingularArchive(Vec<OsString>),
     #[error("The top-level of the archive must only contain a list directory, but it's empty")]
     EmptyArchive,
-    #[error("Bad CRC (got {computed:08x}, expected {expected:08x}): {}", path.display())]
+    #[error("Bad CRC (got {computed:08x}, expected {expected:08x}) for file: {}", path.display())]
     BadCrc32 {
         path: PathBuf,
         computed: u32,
@@ -33,7 +33,7 @@ pub enum Error {
     InvalidName(#[from] InvalidNameError),
     #[error("The metadata at {0} is invalid")]
     InvalidMetadata(String, Box<uv_pypi_types::MetadataError>),
-    #[error("Bad CRC (got {computed:08x}, expected {expected:08x}): {path}")]
+    #[error("Bad CRC (got {computed:08x}, expected {expected:08x}) for file: {path}")]
     BadCrc32 {
         path: String,
         computed: u32,
@@ -8907,8 +8907,8 @@ fn bad_crc32() -> Result<()> {
     ----- stderr -----
     Resolved 7 packages in [TIME]
       × Failed to download `osqp @ https://files.pythonhosted.org/packages/00/04/5959347582ab970e9b922f27585d34f7c794ed01125dac26fb4e7dd80205/osqp-1.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl`
-      ├─▶ Failed to extract archive
-      ╰─▶ Bad CRC (got ca5f1131, expected d5c95dfa): osqp/ext_builtin.cpython-311-x86_64-linux-gnu.so
+      ├─▶ Failed to extract archive: osqp-1.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+      ╰─▶ Bad CRC (got ca5f1131, expected d5c95dfa) for file: osqp/ext_builtin.cpython-311-x86_64-linux-gnu.so
     "
     );
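The updated snapshot shows the payoff: the `├─▶` line now names the wheel and the `╰─▶` line names the offending file inside it. That nesting follows the `std::error::Error::source` chain that `#[source]` establishes. A rough, self-contained sketch of rendering such a chain (illustrative only; the error types and the reporter here are stand-ins, not uv's actual code, and it assumes the `thiserror` crate):

    use std::error::Error as StdError;
    use thiserror::Error;

    #[derive(Debug, Error)]
    #[error("Bad CRC (got {computed:08x}, expected {expected:08x}) for file: {path}")]
    struct BadCrc32 { path: String, computed: u32, expected: u32 }

    #[derive(Debug, Error)]
    #[error("Failed to extract archive: {0}")]
    struct Extract(String, #[source] BadCrc32);

    #[derive(Debug, Error)]
    #[error("Failed to download `{0}`")]
    struct Download(String, #[source] Extract);

    // Walk the `source()` chain and print it in the nested style of the snapshot.
    fn render(err: &dyn StdError) {
        println!("  × {err}");
        let mut source = err.source();
        while let Some(cause) = source {
            let next = cause.source();
            let arrow = if next.is_some() { "├─▶" } else { "╰─▶" };
            println!("  {arrow} {cause}");
            source = next;
        }
    }

    fn main() {
        let wheel = "osqp-1.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl";
        let err = Download(
            wheel.to_string(),
            Extract(
                wheel.to_string(),
                BadCrc32 {
                    path: "osqp/ext_builtin.cpython-311-x86_64-linux-gnu.so".to_string(),
                    computed: 0xca5f_1131,
                    expected: 0xd5c9_5dfa,
                },
            ),
        );
        render(&err);
    }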