Filter out mismatching hashes in version map

This commit is contained in:
Charlie Marsh 2024-04-04 15:23:14 -04:00
parent ec168979ea
commit b8eeee453f
25 changed files with 452 additions and 39 deletions

2
Cargo.lock generated
View File

@ -4791,9 +4791,11 @@ dependencies = [
"itertools 0.12.1",
"once-map",
"pep508_rs",
"pypi-types",
"rustc-hash",
"serde",
"serde_json",
"thiserror",
"uv-cache",
"uv-interpreter",
"uv-normalize",

4
constraints.txt Normal file
View File

@ -0,0 +1,4 @@
click==8.1.7 \
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
# via flask

View File

@ -84,6 +84,8 @@ impl Display for IncompatibleDist {
IncompatibleWheel::RequiresPython(python) => {
write!(f, "it requires at python {python}")
}
IncompatibleWheel::MissingHash => f.write_str("it has no hash"),
IncompatibleWheel::MismatchedHash => f.write_str("the hash does not match"),
},
Self::Source(incompatibility) => match incompatibility {
IncompatibleSource::NoBuild => {
@ -104,6 +106,8 @@ impl Display for IncompatibleDist {
IncompatibleSource::RequiresPython(python) => {
write!(f, "it requires python {python}")
}
IncompatibleSource::MissingHash => f.write_str("it has no hash"),
IncompatibleSource::MismatchedHash => f.write_str("the hash does not match"),
},
Self::Unavailable => f.write_str("no distributions are available"),
}
@ -122,6 +126,8 @@ pub enum IncompatibleWheel {
Tag(IncompatibleTag),
RequiresPython(VersionSpecifiers),
Yanked(Yanked),
MissingHash,
MismatchedHash,
NoBinary,
}
@ -136,6 +142,8 @@ pub enum IncompatibleSource {
ExcludeNewer(Option<i64>),
RequiresPython(VersionSpecifiers),
Yanked(Yanked),
MissingHash,
MismatchedHash,
NoBuild,
}
@ -369,20 +377,26 @@ impl IncompatibleSource {
Self::ExcludeNewer(timestamp_self) => match other {
// Smaller timestamps are closer to the cut-off time
Self::ExcludeNewer(timestamp_other) => timestamp_other < timestamp_self,
Self::NoBuild | Self::RequiresPython(_) | Self::Yanked(_) => true,
Self::NoBuild
| Self::RequiresPython(_)
| Self::Yanked(_)
| Self::MissingHash
| Self::MismatchedHash => true,
},
Self::RequiresPython(_) => match other {
Self::ExcludeNewer(_) => false,
// Version specifiers cannot be reasonably compared
Self::RequiresPython(_) => false,
Self::NoBuild | Self::Yanked(_) => true,
Self::NoBuild | Self::Yanked(_) | Self::MissingHash | Self::MismatchedHash => true,
},
Self::Yanked(_) => match other {
Self::ExcludeNewer(_) | Self::RequiresPython(_) => false,
// Yanks with a reason are more helpful for errors
Self::Yanked(yanked_other) => matches!(yanked_other, Yanked::Reason(_)),
Self::NoBuild => true,
Self::NoBuild | Self::MissingHash | Self::MismatchedHash => true,
},
Self::MissingHash => false,
Self::MismatchedHash => false,
Self::NoBuild => false,
}
}
@ -400,26 +414,37 @@ impl IncompatibleWheel {
timestamp_other < timestamp_self
}
},
Self::NoBinary | Self::RequiresPython(_) | Self::Tag(_) | Self::Yanked(_) => true,
Self::NoBinary
| Self::RequiresPython(_)
| Self::Tag(_)
| Self::Yanked(_)
| Self::MissingHash
| Self::MismatchedHash => true,
},
Self::Tag(tag_self) => match other {
Self::ExcludeNewer(_) => false,
Self::Tag(tag_other) => tag_other > tag_self,
Self::NoBinary | Self::RequiresPython(_) | Self::Yanked(_) => true,
Self::NoBinary
| Self::RequiresPython(_)
| Self::Yanked(_)
| Self::MissingHash
| Self::MismatchedHash => true,
},
Self::RequiresPython(_) => match other {
Self::ExcludeNewer(_) | Self::Tag(_) => false,
// Version specifiers cannot be reasonably compared
Self::RequiresPython(_) => false,
Self::NoBinary | Self::Yanked(_) => true,
Self::NoBinary | Self::Yanked(_) | Self::MissingHash | Self::MismatchedHash => true,
},
Self::Yanked(_) => match other {
Self::ExcludeNewer(_) | Self::Tag(_) | Self::RequiresPython(_) => false,
// Yanks with a reason are more helpful for errors
Self::Yanked(yanked_other) => matches!(yanked_other, Yanked::Reason(_)),
Self::NoBinary => true,
Self::NoBinary | Self::MissingHash | Self::MismatchedHash => true,
},
Self::NoBinary => false,
Self::MismatchedHash => false,
Self::MissingHash => false,
}
}
}

View File

@ -72,7 +72,7 @@ pub enum Pep508ErrorSource {
String(String),
/// A URL parsing error.
#[error(transparent)]
UrlError(#[from] verbatim_url::VerbatimUrlError),
UrlError(#[from] VerbatimUrlError),
/// The version requirement is not supported.
#[error("{0}")]
UnsupportedRequirement(String),

View File

@ -122,6 +122,63 @@ impl Default for Yanked {
}
}
/// A hash algorithm that may be named in a `<algorithm>:<digest>` pair.
///
/// Parsed from the lowercase algorithm names accepted by [`FromStr`] (e.g., `"sha256"`).
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum HashAlgorithm {
    /// MD5 (accepted for compatibility; weaker than the SHA-2 family).
    Md5,
    /// SHA-2 with a 256-bit digest.
    Sha256,
    /// SHA-2 with a 384-bit digest.
    Sha384,
    /// SHA-2 with a 512-bit digest.
    Sha512,
}
impl FromStr for HashAlgorithm {
    type Err = HashError;

    /// Parse a lowercase algorithm name: `md5`, `sha256`, `sha384`, or `sha512`.
    ///
    /// Any other name is rejected with [`HashError::UnsupportedHashAlgorithm`].
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let algorithm = match s {
            "md5" => Self::Md5,
            "sha256" => Self::Sha256,
            "sha384" => Self::Sha384,
            "sha512" => Self::Sha512,
            unsupported => {
                return Err(HashError::UnsupportedHashAlgorithm(unsupported.to_string()))
            }
        };
        Ok(algorithm)
    }
}
/// A hash name and hex encoded digest of the file.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct HashDigest {
    /// The algorithm that produced the digest.
    pub algorithm: HashAlgorithm,
    /// The digest value as parsed from the `<algorithm>:<digest>` pair.
    /// NOTE(review): stored verbatim — not validated here for hex charset or length.
    pub digest: String,
}
impl FromStr for HashDigest {
    type Err = HashError;

    /// Parse an `<algorithm>:<digest>` pair, e.g., `sha256:ab12…`.
    ///
    /// Exactly one `:` separator is required; zero or more than one yields
    /// [`HashError::InvalidStructure`]. Unknown algorithm names yield
    /// [`HashError::UnsupportedHashAlgorithm`].
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Split on the first `:`; a missing separator is malformed.
        let (name, value) = s
            .split_once(':')
            .ok_or_else(|| HashError::InvalidStructure(s.to_string()))?;

        // A second separator (e.g., `sha256:a:b`) is also malformed.
        if value.contains(':') {
            return Err(HashError::InvalidStructure(s.to_string()));
        }

        Ok(HashDigest {
            algorithm: HashAlgorithm::from_str(name)?,
            digest: value.to_string(),
        })
    }
}
/// A dictionary mapping a hash name to a hex encoded digest of the file.
///
/// PEP 691 says multiple hashes can be included and the interpretation is left to the client.
@ -176,6 +233,14 @@ impl Hashes {
.or(self.sha256.as_deref())
.or(self.md5.as_deref())
}
/// Returns `true` if no digest is present for any supported algorithm.
pub fn is_empty(&self) -> bool {
    !(self.sha512.is_some()
        || self.sha384.is_some()
        || self.sha256.is_some()
        || self.md5.is_some())
}
}
impl FromStr for Hashes {

View File

@ -20,7 +20,7 @@ use platform_tags::Tags;
use pypi_types::Hashes;
use uv_cache::{Cache, CacheBucket};
use uv_normalize::PackageName;
use uv_types::{NoBinary, NoBuild};
use uv_types::{NoBinary, NoBuild, RequiredHashes};
use crate::cached_client::{CacheControl, CachedClientError};
use crate::html::SimpleHtml;
@ -276,6 +276,7 @@ impl FlatIndex {
pub fn from_entries(
entries: FlatIndexEntries,
tags: &Tags,
required_hashes: &RequiredHashes,
no_build: &NoBuild,
no_binary: &NoBinary,
) -> Self {
@ -288,6 +289,7 @@ impl FlatIndex {
file,
filename,
tags,
required_hashes,
no_build,
no_binary,
url,
@ -305,6 +307,7 @@ impl FlatIndex {
file: File,
filename: DistFilename,
tags: &Tags,
required_hashes: &RequiredHashes,
no_build: &NoBuild,
no_binary: &NoBinary,
index: IndexUrl,
@ -315,7 +318,13 @@ impl FlatIndex {
DistFilename::WheelFilename(filename) => {
let version = filename.version.clone();
let compatibility = Self::wheel_compatibility(&filename, tags, no_binary);
let compatibility = Self::wheel_compatibility(
&filename,
&file.hashes,
tags,
required_hashes,
no_binary,
);
let dist = Dist::Built(BuiltDist::Registry(RegistryBuiltDist {
filename,
file: Box::new(file),
@ -331,7 +340,12 @@ impl FlatIndex {
}
}
DistFilename::SourceDistFilename(filename) => {
let compatibility = Self::source_dist_compatibility(&filename, no_build);
let compatibility = Self::source_dist_compatibility(
&filename,
&file.hashes,
required_hashes,
no_build,
);
let dist = Dist::Source(SourceDist::Registry(RegistrySourceDist {
filename: filename.clone(),
file: Box::new(file),
@ -351,6 +365,8 @@ impl FlatIndex {
fn source_dist_compatibility(
filename: &SourceDistFilename,
hashes: &Hashes,
required_hashes: &RequiredHashes,
no_build: &NoBuild,
) -> SourceDistCompatibility {
// Check if source distributions are allowed for this package.
@ -364,12 +380,28 @@ impl FlatIndex {
return SourceDistCompatibility::Incompatible(IncompatibleSource::NoBuild);
}
// Check if hashes line up
if let Some(required_hashes) = required_hashes.get(&filename.name) {
if !required_hashes.is_empty() {
if hashes.is_empty() {
return SourceDistCompatibility::Incompatible(IncompatibleSource::MissingHash);
}
if !required_hashes.contains(hashes) {
return SourceDistCompatibility::Incompatible(
IncompatibleSource::MismatchedHash,
);
}
}
}
SourceDistCompatibility::Compatible
}
fn wheel_compatibility(
filename: &WheelFilename,
hashes: &Hashes,
tags: &Tags,
required_hashes: &RequiredHashes,
no_binary: &NoBinary,
) -> WheelCompatibility {
// Check if binaries are allowed for this package.
@ -383,6 +415,18 @@ impl FlatIndex {
return WheelCompatibility::Incompatible(IncompatibleWheel::NoBinary);
}
// Check if hashes line up
if let Some(required_hashes) = required_hashes.get(&filename.name) {
if !required_hashes.is_empty() {
if hashes.is_empty() {
return WheelCompatibility::Incompatible(IncompatibleWheel::MissingHash);
}
if !required_hashes.contains(hashes) {
return WheelCompatibility::Incompatible(IncompatibleWheel::MismatchedHash);
}
}
}
// Determine a compatibility for the wheel based on tags.
WheelCompatibility::from(filename.compatibility(tags))
}

View File

@ -17,7 +17,9 @@ use uv_dispatch::BuildDispatch;
use uv_installer::SitePackages;
use uv_interpreter::PythonEnvironment;
use uv_resolver::{InMemoryIndex, Manifest, Options, Resolver};
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBinary, NoBuild, SetupPyStrategy};
use uv_types::{
BuildIsolation, ConfigSettings, InFlight, NoBinary, NoBuild, RequiredHashes, SetupPyStrategy,
};
#[derive(ValueEnum, Default, Clone)]
pub(crate) enum ResolveCliFormat {
@ -72,6 +74,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
FlatIndex::from_entries(
entries,
venv.interpreter().tags()?,
&RequiredHashes::default(),
&no_build,
&NoBinary::None,
)
@ -98,7 +101,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
// Copied from `BuildDispatch`
let tags = venv.interpreter().tags()?;
let resolver = Resolver::new(
Manifest::simple(args.requirements.clone()),
Manifest::simple(args.requirements.clone(), RequiredHashes::default()),
Options::default(),
venv.interpreter().markers(),
venv.interpreter(),

View File

@ -22,7 +22,7 @@ use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_resolver::{InMemoryIndex, Manifest, Options, Resolver};
use uv_types::{
BuildContext, BuildIsolation, BuildKind, ConfigSettings, EmptyInstalledPackages, InFlight,
NoBinary, NoBuild, Reinstall, SetupPyStrategy,
NoBinary, NoBuild, Reinstall, RequiredHashes, SetupPyStrategy,
};
/// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`]
@ -137,7 +137,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
let markers = self.interpreter.markers();
let tags = self.interpreter.tags()?;
let resolver = Resolver::new(
Manifest::simple(requirements.to_vec()),
Manifest::simple(requirements.to_vec(), RequiredHashes::default()),
self.options,
markers,
self.interpreter,

View File

@ -7,7 +7,7 @@ use tracing::{instrument, Level};
use cache_key::CanonicalUrl;
use distribution_types::{FlatIndexLocation, IndexUrl};
use pep508_rs::{Requirement, RequirementsTxtRequirement};
use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
use requirements_txt::{EditableRequirement, FindLink, RequirementEntry, RequirementsTxt};
use uv_client::BaseClientBuilder;
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
@ -20,6 +20,8 @@ use crate::{ExtrasSpecification, RequirementsSource};
pub struct RequirementsSpecification {
/// The name of the project specifying requirements.
pub project: Option<PackageName>,
/// The `requirements.txt` entries for the project.
pub entries: Vec<RequirementEntry>,
/// The requirements for the project.
pub requirements: Vec<RequirementsTxtRequirement>,
/// The constraints for the project.
@ -60,6 +62,7 @@ impl RequirementsSpecification {
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
entries: vec![],
requirements: vec![requirement],
constraints: vec![],
overrides: vec![],
@ -79,6 +82,7 @@ impl RequirementsSpecification {
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
entries: vec![],
requirements: vec![],
constraints: vec![],
overrides: vec![],
@ -98,6 +102,7 @@ impl RequirementsSpecification {
RequirementsTxt::parse(path, std::env::current_dir()?, client_builder).await?;
Self {
project: None,
entries: requirements_txt.requirements.clone(),
requirements: requirements_txt
.requirements
.into_iter()
@ -148,6 +153,7 @@ impl RequirementsSpecification {
{
Self {
project: Some(project.name),
entries: vec![],
requirements: project
.requirements
.into_iter()
@ -175,6 +181,7 @@ impl RequirementsSpecification {
})?;
Self {
project: None,
entries: vec![],
requirements: vec![],
constraints: vec![],
overrides: vec![],
@ -200,6 +207,7 @@ impl RequirementsSpecification {
})?;
Self {
project: None,
entries: vec![],
requirements: vec![],
constraints: vec![],
overrides: vec![],
@ -232,6 +240,7 @@ impl RequirementsSpecification {
// a requirements file can also add constraints.
for source in requirements {
let source = Self::from_source(source, extras, client_builder).await?;
spec.entries.extend(source.entries);
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
@ -261,7 +270,8 @@ impl RequirementsSpecification {
spec.no_build.extend(source.no_build);
}
// Read all constraints, treating _everything_ as a constraint.
// Read all constraints, treating _everything_ as a constraint. The raw entries (i.e.,
// hashes) are ignored, as they are not relevant for constraints.
for source in constraints {
let source = Self::from_source(source, extras, client_builder).await?;
for requirement in source.requirements {
@ -311,6 +321,7 @@ impl RequirementsSpecification {
}
}
}
spec.entries.extend(source.entries);
spec.overrides.extend(source.constraints);
spec.overrides.extend(source.overrides);

View File

@ -0,0 +1,71 @@
use std::str::FromStr;

use rustc_hash::{FxHashMap, FxHashSet};

use pep508_rs::{MarkerEnvironment, RequirementsTxtRequirement};
use pypi_types::{HashError, Hashes};
use requirements_txt::RequirementEntry;
use uv_normalize::PackageName;

/// A map from package name to the set of hashes that distributions of that package are
/// required to match (as with `--require-hashes`).
#[derive(Debug, Default, Clone)]
pub struct RequiredHashes(FxHashMap<PackageName, FxHashSet<Hashes>>);

impl RequiredHashes {
    /// Generate the [`RequiredHashes`] from a set of requirement entries.
    ///
    /// Only entries whose markers evaluate to `true` under `markers` are considered.
    ///
    /// # Errors
    ///
    /// Returns an error if an entry is an unnamed requirement, or if any of its hashes
    /// fail to parse.
    pub fn from_entries(
        entries: &[RequirementEntry],
        markers: &MarkerEnvironment,
    ) -> Result<Self, RequiredHashesError> {
        let mut allowed_hashes = FxHashMap::<PackageName, FxHashSet<Hashes>>::default();

        // For each requirement, map from name to allowed hashes. We use the last entry for each
        // package.
        //
        // For now, unnamed requirements are unsupported. This should be fine, since `--require-hashes`
        // tends to be used after `pip-compile`, which will always output named requirements.
        //
        // TODO(charlie): Preserve hashes from `requirements.txt` through to this pass, so that we
        // can iterate over requirements directly, rather than iterating over the entries.
        for entry in entries
            .iter()
            .filter(|entry| entry.requirement.evaluate_markers(markers, &[]))
        {
            // Extract the requirement name.
            let name = match &entry.requirement {
                RequirementsTxtRequirement::Pep508(requirement) => requirement.name.clone(),
                RequirementsTxtRequirement::Unnamed(_) => {
                    return Err(RequiredHashesError::UnnamedRequirement)
                }
            };

            // Parse the hashes, propagating any parse failure instead of panicking.
            let hashes = entry
                .hashes
                .iter()
                .map(|hash| Hashes::from_str(hash))
                .collect::<Result<FxHashSet<_>, _>>()?;

            // TODO(charlie): Extract hashes from URL fragments.
            allowed_hashes.insert(name, hashes);
        }

        Ok(Self(allowed_hashes))
    }

    /// Returns the set of required hashes for the given package, if any were specified.
    pub fn get(&self, package_name: &PackageName) -> Option<&FxHashSet<Hashes>> {
        self.0.get(package_name)
    }
}

#[derive(thiserror::Error, Debug)]
pub enum RequiredHashesError {
    #[error(transparent)]
    Hash(#[from] HashError),
    #[error("Unnamed requirements are not supported in `--require-hashes`")]
    UnnamedRequirement,
}

View File

@ -2,7 +2,7 @@ use distribution_types::LocalEditable;
use pep508_rs::{MarkerEnvironment, Requirement};
use pypi_types::Metadata23;
use uv_normalize::PackageName;
use uv_types::{Constraints, Overrides, RequestedRequirements};
use uv_types::{Constraints, Overrides, RequestedRequirements, RequiredHashes};
use crate::{preferences::Preference, Exclusions};
@ -34,6 +34,9 @@ pub struct Manifest {
/// direct requirements in their own right.
pub(crate) editables: Vec<(LocalEditable, Metadata23)>,
/// The required hashes for the project.
pub(crate) hashes: RequiredHashes,
/// The installed packages to exclude from consideration during resolution.
///
/// These typically represent packages that are being upgraded or reinstalled
@ -57,6 +60,7 @@ impl Manifest {
preferences: Vec<Preference>,
project: Option<PackageName>,
editables: Vec<(LocalEditable, Metadata23)>,
hashes: RequiredHashes,
exclusions: Exclusions,
lookaheads: Vec<RequestedRequirements>,
) -> Self {
@ -67,12 +71,13 @@ impl Manifest {
preferences,
project,
editables,
hashes,
exclusions,
lookaheads,
}
}
pub fn simple(requirements: Vec<Requirement>) -> Self {
pub fn simple(requirements: Vec<Requirement>, hashes: RequiredHashes) -> Self {
Self {
requirements,
constraints: Constraints::default(),
@ -80,6 +85,7 @@ impl Manifest {
preferences: Vec::new(),
project: None,
editables: Vec::new(),
hashes,
exclusions: Exclusions::default(),
lookaheads: Vec::new(),
}

View File

@ -60,7 +60,7 @@ pub struct ResolutionGraph {
}
impl ResolutionGraph {
/// Create a new graph from the resolved `PubGrub` state.
/// Create a new graph from the resolved `PubGrub` state.
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_state(
selection: &SelectedDependencies<UvDependencyProvider>,

View File

@ -143,6 +143,7 @@ impl<
tags,
PythonRequirement::new(interpreter, markers),
AllowedYanks::from_manifest(&manifest, markers),
manifest.hashes.clone(),
options.exclude_newer,
build_context.no_binary(),
build_context.no_build(),
@ -757,11 +758,17 @@ impl<
for (package, version) in constraints.iter() {
debug!("Adding direct dependency: {package}{version}");
// STOPSHIP(charlie): If `--require-hashes` is enabled, fail if:
// - Any requirement is a VCS requirement. (But it's fine if it's already installed...)
// - Any requirement is a source tree. (But it's fine if it's already installed...)
// Emit a request to fetch the metadata for this package.
self.visit_package(package, priorities, request_sink)
.await?;
}
// STOPSHIP(charlie): If `--require-hashes` is enabled, fail if editables are provided.
// Add a dependency on each editable.
for (editable, metadata) in self.editables.iter() {
constraints.push(

View File

@ -9,7 +9,7 @@ use pypi_types::Metadata23;
use uv_client::{FlatIndex, RegistryClient};
use uv_distribution::DistributionDatabase;
use uv_normalize::PackageName;
use uv_types::{BuildContext, NoBinary, NoBuild};
use uv_types::{BuildContext, NoBinary, NoBuild, RequiredHashes};
use crate::python_requirement::PythonRequirement;
use crate::version_map::VersionMap;
@ -67,6 +67,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext + Send + Sync> {
tags: Tags,
python_requirement: PythonRequirement,
allowed_yanks: AllowedYanks,
required_hashes: RequiredHashes,
exclude_newer: Option<DateTime<Utc>>,
no_binary: NoBinary,
no_build: NoBuild,
@ -82,6 +83,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
tags: &'a Tags,
python_requirement: PythonRequirement,
allowed_yanks: AllowedYanks,
required_hashes: RequiredHashes,
exclude_newer: Option<DateTime<Utc>>,
no_binary: &'a NoBinary,
no_build: &'a NoBuild,
@ -93,6 +95,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
tags: tags.clone(),
python_requirement,
allowed_yanks,
required_hashes,
exclude_newer,
no_binary: no_binary.clone(),
no_build: no_build.clone(),
@ -124,6 +127,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
&self.tags,
&self.python_requirement,
&self.allowed_yanks,
&self.required_hashes,
self.exclude_newer.as_ref(),
self.flat_index.get(package_name).cloned(),
&self.no_binary,

View File

@ -2,8 +2,9 @@ use std::collections::btree_map::{BTreeMap, Entry};
use std::sync::OnceLock;
use chrono::{DateTime, Utc};
use rkyv::{de::deserializers::SharedDeserializeMap, Deserialize};
use rustc_hash::FxHashSet;
use tracing::{instrument, warn};
use tracing::instrument;
use distribution_filename::{DistFilename, WheelFilename};
use distribution_types::{
@ -13,10 +14,9 @@ use distribution_types::{
use pep440_rs::{Version, VersionSpecifiers};
use platform_tags::Tags;
use pypi_types::{Hashes, Yanked};
use rkyv::{de::deserializers::SharedDeserializeMap, Deserialize};
use uv_client::{FlatDistributions, OwnedArchive, SimpleMetadata, VersionFiles};
use uv_normalize::PackageName;
use uv_types::{NoBinary, NoBuild};
use uv_types::{NoBinary, NoBuild, RequiredHashes};
use uv_warnings::warn_user_once;
use crate::{python_requirement::PythonRequirement, yanks::AllowedYanks};
@ -46,6 +46,7 @@ impl VersionMap {
tags: &Tags,
python_requirement: &PythonRequirement,
allowed_yanks: &AllowedYanks,
required_hashes: &RequiredHashes,
exclude_newer: Option<&DateTime<Utc>>,
flat_index: Option<FlatDistributions>,
no_binary: &NoBinary,
@ -109,6 +110,10 @@ impl VersionMap {
.allowed_versions(package_name)
.cloned()
.unwrap_or_default();
let required_hashes = required_hashes
.get(package_name)
.cloned()
.unwrap_or_default();
Self {
inner: VersionMapInner::Lazy(VersionMapLazy {
map,
@ -120,6 +125,7 @@ impl VersionMap {
python_requirement: python_requirement.clone(),
exclude_newer: exclude_newer.copied(),
allowed_yanks,
required_hashes,
}),
}
}
@ -300,6 +306,8 @@ struct VersionMapLazy {
exclude_newer: Option<DateTime<Utc>>,
/// Which yanked versions are allowed
allowed_yanks: FxHashSet<Version>,
/// The hashes of allowed distributions.
required_hashes: FxHashSet<Hashes>,
}
impl VersionMapLazy {
@ -376,13 +384,14 @@ impl VersionMapLazy {
let version = filename.version().clone();
let requires_python = file.requires_python.clone();
let yanked = file.yanked.clone();
let hash = file.hashes.clone();
let hashes = file.hashes.clone();
match filename {
DistFilename::WheelFilename(filename) => {
let compatibility = self.wheel_compatibility(
&filename,
&version,
requires_python,
&hashes,
yanked,
excluded,
upload_time,
@ -392,12 +401,13 @@ impl VersionMapLazy {
file,
self.index.clone(),
);
priority_dist.insert_built(dist, Some(hash), compatibility);
priority_dist.insert_built(dist, Some(hashes), compatibility);
}
DistFilename::SourceDistFilename(filename) => {
let compatibility = self.source_dist_compatibility(
&version,
requires_python,
&hashes,
yanked,
excluded,
upload_time,
@ -407,7 +417,7 @@ impl VersionMapLazy {
file,
self.index.clone(),
);
priority_dist.insert_source(dist, Some(hash), compatibility);
priority_dist.insert_source(dist, Some(hashes), compatibility);
}
}
}
@ -424,6 +434,7 @@ impl VersionMapLazy {
&self,
version: &Version,
requires_python: Option<VersionSpecifiers>,
hashes: &Hashes,
yanked: Option<Yanked>,
excluded: bool,
upload_time: Option<i64>,
@ -440,6 +451,16 @@ impl VersionMapLazy {
));
}
// Check if hashes line up
if !self.required_hashes.is_empty() {
if hashes.is_empty() {
return SourceDistCompatibility::Incompatible(IncompatibleSource::MissingHash);
}
if !self.required_hashes.contains(hashes) {
return SourceDistCompatibility::Incompatible(IncompatibleSource::MismatchedHash);
}
}
// Check if yanked
if let Some(yanked) = yanked {
if yanked.is_yanked() && !self.allowed_yanks.contains(version) {
@ -468,6 +489,7 @@ impl VersionMapLazy {
filename: &WheelFilename,
version: &Version,
requires_python: Option<VersionSpecifiers>,
hashes: &Hashes,
yanked: Option<Yanked>,
excluded: bool,
upload_time: Option<i64>,
@ -482,6 +504,16 @@ impl VersionMapLazy {
return WheelCompatibility::Incompatible(IncompatibleWheel::ExcludeNewer(upload_time));
}
// Check if hashes line up
if !self.required_hashes.is_empty() {
if hashes.is_empty() {
return WheelCompatibility::Incompatible(IncompatibleWheel::MissingHash);
}
if !self.required_hashes.contains(hashes) {
return WheelCompatibility::Incompatible(IncompatibleWheel::MismatchedHash);
}
}
// Check if yanked
if let Some(yanked) = yanked {
if yanked.is_yanked() && !self.allowed_yanks.contains(version) {

View File

@ -18,7 +18,7 @@ use uv_client::{FlatIndex, RegistryClientBuilder};
use uv_interpreter::{find_default_python, Interpreter, PythonEnvironment};
use uv_resolver::{
DisplayResolutionGraph, Exclusions, InMemoryIndex, Manifest, Options, OptionsBuilder,
PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
PreReleaseMode, Preference, RequiredHashes, ResolutionGraph, ResolutionMode, Resolver,
};
use uv_types::{
BuildContext, BuildIsolation, BuildKind, Constraints, EmptyInstalledPackages, NoBinary,
@ -276,6 +276,7 @@ async fn black_mypy_extensions() -> Result<()> {
vec![],
None,
vec![],
RequiredHashes::default(),
Exclusions::default(),
vec![],
);
@ -315,6 +316,7 @@ async fn black_mypy_extensions_extra() -> Result<()> {
vec![],
None,
vec![],
RequiredHashes::default(),
Exclusions::default(),
vec![],
);
@ -352,6 +354,7 @@ async fn black_flake8() -> Result<()> {
vec![],
None,
vec![],
RequiredHashes::default(),
Exclusions::default(),
vec![],
);
@ -443,6 +446,7 @@ async fn black_respect_preference() -> Result<()> {
)?)],
None,
vec![],
RequiredHashes::default(),
Exclusions::default(),
vec![],
);
@ -481,6 +485,7 @@ async fn black_ignore_preference() -> Result<()> {
)?)],
None,
vec![],
RequiredHashes::default(),
Exclusions::default(),
vec![],
);

View File

@ -16,6 +16,7 @@ workspace = true
distribution-types = { workspace = true }
once-map = { workspace = true }
pep508_rs = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-interpreter = { workspace = true }
uv-normalize = { workspace = true }
@ -26,6 +27,7 @@ itertools = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
thiserror = { workspace = true }
[features]
default = []

View File

@ -0,0 +1,62 @@
use std::str::FromStr;

use rustc_hash::{FxHashMap, FxHashSet};

use pep508_rs::{MarkerEnvironment, Requirement};
use pypi_types::{HashError, Hashes};
use uv_normalize::PackageName;

/// A map from package name to the set of hashes that distributions of that package are
/// required to match (as with `--require-hashes`).
#[derive(Debug, Default, Clone)]
pub struct RequiredHashes(FxHashMap<PackageName, FxHashSet<Hashes>>);

impl RequiredHashes {
    /// Generate the [`RequiredHashes`] from requirements paired with their declared hashes.
    ///
    /// Requirements whose markers do not evaluate to `true` under `markers` are skipped.
    ///
    /// # Errors
    ///
    /// Returns an error if any hash fails to parse.
    pub fn from_requirements(
        requirements: impl Iterator<Item = (Requirement, Vec<String>)>,
        markers: &MarkerEnvironment,
    ) -> Result<Self, RequiredHashesError> {
        let mut allowed_hashes = FxHashMap::<PackageName, FxHashSet<Hashes>>::default();

        // For each requirement, map from name to allowed hashes. We use the last requirement for
        // each package.
        //
        // For now, unnamed requirements are unsupported (callers are expected to filter them out
        // before this pass). This should be fine, since `--require-hashes` tends to be used after
        // `pip-compile`, which will always output named requirements.
        for (requirement, hashes) in requirements {
            if !requirement.evaluate_markers(markers, &[]) {
                continue;
            }

            // Parse the hashes, propagating any parse failure instead of panicking.
            let hashes = hashes
                .iter()
                .map(|hash| Hashes::from_str(hash))
                .collect::<Result<FxHashSet<_>, _>>()?;

            // TODO(charlie): Extract hashes from URL fragments.
            allowed_hashes.insert(requirement.name, hashes);
        }

        Ok(Self(allowed_hashes))
    }

    /// Returns the set of required hashes for the given package, if any were specified.
    pub fn get(&self, package_name: &PackageName) -> Option<&FxHashSet<Hashes>> {
        self.0.get(package_name)
    }
}

#[derive(thiserror::Error, Debug)]
pub enum RequiredHashesError {
    #[error(transparent)]
    Hash(#[from] HashError),
    #[error("Unnamed requirements are not supported in `--require-hashes`")]
    UnnamedRequirement,
}

View File

@ -3,6 +3,7 @@ pub use build_options::*;
pub use config_settings::*;
pub use constraints::*;
pub use downloads::*;
pub use hashes::*;
pub use name_specifiers::*;
pub use overrides::*;
pub use package_options::*;
@ -13,6 +14,7 @@ mod build_options;
mod config_settings;
mod constraints;
mod downloads;
mod hashes;
mod name_specifiers;
mod overrides;
mod package_options;

View File

@ -37,7 +37,7 @@ use uv_resolver::{
};
use uv_types::{
BuildIsolation, ConfigSettings, Constraints, EmptyInstalledPackages, InFlight, IndexStrategy,
NoBinary, NoBuild, Overrides, SetupPyStrategy, Upgrade,
NoBinary, NoBuild, Overrides, RequiredHashes, SetupPyStrategy, Upgrade,
};
use uv_warnings::warn_user;
@ -100,6 +100,7 @@ pub(crate) async fn pip_compile(
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
entries: _,
requirements,
constraints,
overrides,
@ -228,7 +229,13 @@ pub(crate) async fn pip_compile(
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, &tags, &no_build, &NoBinary::None)
FlatIndex::from_entries(
entries,
&tags,
&RequiredHashes::default(),
&no_build,
&NoBinary::None,
)
};
// Track in-flight downloads, builds, etc., across resolutions.
@ -369,7 +376,9 @@ pub(crate) async fn pip_compile(
preferences,
project,
editables,
// Do not consider any installed packages during compilation
// Do not require hashes during resolution.
RequiredHashes::default(),
// Do not consider any installed packages during resolution.
Exclusions::All,
lookaheads,
);

View File

@ -14,7 +14,7 @@ use distribution_types::{
LocalEditables, Name, Resolution,
};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::{MarkerEnvironment, Requirement};
use pep508_rs::{MarkerEnvironment, Requirement, RequirementsTxtRequirement};
use platform_tags::Tags;
use pypi_types::{Metadata23, Yanked};
use requirements_txt::EditableRequirement;
@ -39,7 +39,7 @@ use uv_resolver::{
};
use uv_types::{
BuildIsolation, ConfigSettings, Constraints, InFlight, IndexStrategy, NoBinary, NoBuild,
Overrides, Reinstall, SetupPyStrategy, Upgrade,
Overrides, Reinstall, RequiredHashes, SetupPyStrategy, Upgrade,
};
use uv_warnings::warn_user;
@ -97,6 +97,7 @@ pub(crate) async fn pip_install(
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
entries,
requirements,
constraints,
overrides,
@ -188,6 +189,21 @@ pub(crate) async fn pip_install(
let tags = venv.interpreter().tags()?;
let markers = venv.interpreter().markers();
// Collect the set of required hashes.
let required_hashes = if require_hashes {
RequiredHashes::from_requirements(
entries
.into_iter()
.flat_map(|requirement| match requirement.requirement {
RequirementsTxtRequirement::Pep508(req) => Some((req, requirement.hashes)),
RequirementsTxtRequirement::Unnamed(_) => None,
}),
markers,
)?
} else {
RequiredHashes::default()
};
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
@ -212,7 +228,7 @@ pub(crate) async fn pip_install(
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, tags, &no_build, &no_binary)
FlatIndex::from_entries(entries, tags, &required_hashes, &no_build, &no_binary)
};
// Determine whether to enable build isolation.
@ -307,6 +323,7 @@ pub(crate) async fn pip_install(
overrides,
project,
&editables,
required_hashes,
&site_packages,
&reinstall,
&upgrade,
@ -508,6 +525,7 @@ async fn resolve(
overrides: Vec<Requirement>,
project: Option<PackageName>,
editables: &[BuiltEditable],
required_hashes: RequiredHashes,
site_packages: &SitePackages<'_>,
reinstall: &Reinstall,
upgrade: &Upgrade,
@ -570,6 +588,7 @@ async fn resolve(
preferences,
project,
editables,
required_hashes,
exclusions,
lookaheads,
);
@ -1011,6 +1030,9 @@ enum Error {
#[error(transparent)]
Platform(#[from] platform_tags::PlatformError),
#[error(transparent)]
RequiredHashes(#[from] uv_types::RequiredHashesError),
#[error(transparent)]
Io(#[from] std::io::Error),

View File

@ -9,6 +9,7 @@ use distribution_types::{
IndexLocations, InstalledMetadata, LocalDist, LocalEditable, LocalEditables, Name, ResolvedDist,
};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::RequirementsTxtRequirement;
use platform_tags::Tags;
use pypi_types::Yanked;
use requirements_txt::EditableRequirement;
@ -29,7 +30,7 @@ use uv_requirements::{
use uv_resolver::{DependencyMode, InMemoryIndex, Manifest, OptionsBuilder, Resolver};
use uv_types::{
BuildIsolation, ConfigSettings, EmptyInstalledPackages, InFlight, IndexStrategy, NoBinary,
NoBuild, Reinstall, SetupPyStrategy,
NoBuild, Reinstall, RequiredHashes, SetupPyStrategy,
};
use uv_warnings::warn_user;
@ -76,6 +77,7 @@ pub(crate) async fn pip_sync(
// Read all requirements from the provided sources.
let RequirementsSpecification {
project: _,
entries,
requirements,
constraints: _,
overrides: _,
@ -135,6 +137,22 @@ pub(crate) async fn pip_sync(
// Determine the current environment markers.
let tags = venv.interpreter().tags()?;
let markers = venv.interpreter().markers();
// Collect the set of required hashes.
let required_hashes = if require_hashes {
RequiredHashes::from_requirements(
entries
.into_iter()
.flat_map(|requirement| match requirement.requirement {
RequirementsTxtRequirement::Pep508(req) => Some((req, requirement.hashes)),
RequirementsTxtRequirement::Unnamed(_) => None,
}),
markers,
)?
} else {
RequiredHashes::default()
};
// Incorporate any index locations from the provided sources.
let index_locations =
@ -160,7 +178,7 @@ pub(crate) async fn pip_sync(
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, tags, &no_build, &no_binary)
FlatIndex::from_entries(entries, tags, &required_hashes, &no_build, &no_binary)
};
// Create a shared in-memory index.
@ -301,7 +319,7 @@ pub(crate) async fn pip_sync(
// Run the resolver.
let resolver = Resolver::new(
Manifest::simple(remote),
Manifest::simple(remote, required_hashes),
options,
markers,
interpreter,

View File

@ -22,7 +22,7 @@ use uv_interpreter::{find_default_python, find_requested_python, Error};
use uv_resolver::{InMemoryIndex, OptionsBuilder};
use uv_types::{
BuildContext, BuildIsolation, ConfigSettings, InFlight, IndexStrategy, NoBinary, NoBuild,
SetupPyStrategy,
RequiredHashes, SetupPyStrategy,
};
use crate::commands::ExitStatus;
@ -169,7 +169,13 @@ async fn venv_impl(
.fetch(index_locations.flat_index())
.await
.map_err(VenvError::FlatIndex)?;
FlatIndex::from_entries(entries, tags, &NoBuild::All, &NoBinary::None)
FlatIndex::from_entries(
entries,
tags,
&RequiredHashes::default(),
&NoBuild::All,
&NoBinary::None,
)
};
// Create a shared in-memory index.

2
requirements.in Normal file
View File

@ -0,0 +1,2 @@
example-a==1.0.0

11
requirements.txt Normal file
View File

@ -0,0 +1,11 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --generate-hashes --index-url=http://localhost:8000/index/simple-html/ requirements.in
#
--index-url http://localhost:8000/index/simple-html/
example-a==1.0.0 \
--hash=sha256:105f52f5cb7b5a677b4810004ec487f6420fbee6a368038cf8cf8384de5be1939 \
--hash=sha256:2df9bbf9c4e7940190f11d70c9d6168880c66bb5a19a0d88de7c8eda233e38f6e