mirror of https://github.com/astral-sh/uv
Implement `--find-links` as flat indexes (directories in pip-compile) (#912)
Add directory `--find-links` support for local paths to pip-compile.

It seems that pip joins all sources and then picks the best package. We explicitly give find-links packages precedence when the same version exists both on an index and locally, by prefilling the `VersionMap`; otherwise they are added as another index and the existing precedence rules apply.

Internally, the feature is called _flat index_, which is more meaningful than _find links_: we're not looking for links, we're picking up local directories, and (TBD) we will support another index format that is just a flat list of files instead of a nested index.

`RegistryBuiltDist` and `RegistrySourceDist` now use `WheelFilename` and `SourceDistFilename` respectively. The `File` inside `RegistryBuiltDist` and `RegistrySourceDist` gained the ability to represent both a URL and a path, so that `--find-links` with a URL and with a path behave the same, both being locked as `<package_name>@<version>` instead of `<package_name> @ <url>`. (This is more of a detail; the PR still works in general if we strip that and represent directory find-links as `<package_name> @ file:///path/to/file.ext`.)

`PrioritizedDistribution` and `FlatIndex` have been moved to locations where we can use them in the upstack PR.

I added a `scripts/wheels` directory with stripped-down wheels to use for testing. We're still lacking tests for correct tag-priority precedence with flat indexes; I only confirmed this manually, since it is not covered in the pip-compile or pip-sync output.

Closes #876
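To make the precedence mechanism above concrete, here is a minimal, self-contained sketch. The names below (`Candidate`, the string-keyed map) are illustrative stand-ins, not the crate's actual `VersionMap`/`PrioritizedDistribution` API: distributions found via `--find-links` are inserted first, and registry candidates only fill versions that are still vacant, so a version that exists both locally and on an index resolves to the local file.

use std::collections::BTreeMap;

// Simplified stand-ins for illustration; the real crate keys a per-package
// `VersionMap` of `PrioritizedDistribution` entries.
#[derive(Debug)]
enum Candidate {
    FlatIndex(&'static str), // local wheel or sdist from `--find-links`
    Registry(&'static str),  // file hosted on a package index
}

fn main() {
    let mut version_map: BTreeMap<&str, Candidate> = BTreeMap::new();

    // Prefill with flat-index distributions so they win over index files...
    version_map.insert(
        "1.2.3",
        Candidate::FlatIndex("scripts/wheels/pkg-1.2.3-py3-none-any.whl"),
    );

    // ...then registry files only occupy versions that are still vacant.
    for (version, url) in [
        ("1.2.3", "https://example.invalid/pkg-1.2.3.tar.gz"),
        ("1.2.4", "https://example.invalid/pkg-1.2.4.tar.gz"),
    ] {
        version_map
            .entry(version)
            .or_insert(Candidate::Registry(url));
    }

    assert!(matches!(version_map["1.2.3"], Candidate::FlatIndex(_)));
    assert!(matches!(version_map["1.2.4"], Candidate::Registry(_)));
}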
This commit is contained in:
parent 5ffbfadf66
commit e9b6b6fa36
@@ -854,6 +854,7 @@ dependencies = [
 "once_cell",
 "pep440_rs 0.3.12",
 "pep508_rs",
 "platform-tags",
 "puffin-git",
 "puffin-normalize",
 "pypi-types",

@@ -2403,6 +2404,7 @@ dependencies = [
 "async_http_range_reader",
 "async_zip",
 "cache-key",
 "chrono",
 "distribution-filename",
 "distribution-types",
 "fs-err",

@@ -2414,6 +2416,7 @@ dependencies = [
 "install-wheel-rs",
 "pep440_rs 0.3.12",
 "pep508_rs",
 "platform-tags",
 "puffin-cache",
 "puffin-fs",
 "puffin-normalize",

@@ -2422,6 +2425,7 @@ dependencies = [
 "reqwest-middleware",
 "reqwest-retry",
 "rmp-serde",
 "rustc-hash",
 "serde",
 "serde_json",
 "sha2",

@@ -1,3 +1,6 @@
use pep440_rs::Version;
use puffin_normalize::PackageName;
use std::fmt::{Display, Formatter};
use std::str::FromStr;

pub use source_dist::{SourceDistExtension, SourceDistFilename, SourceDistFilenameError};

@@ -13,10 +16,8 @@ pub enum DistFilename {
}

impl DistFilename {
    pub fn try_from_filename(
        filename: &str,
        package_name: &puffin_normalize::PackageName,
    ) -> Option<Self> {
    /// Parse a filename as wheel or source dist name.
    pub fn try_from_filename(filename: &str, package_name: &PackageName) -> Option<Self> {
        if let Ok(filename) = WheelFilename::from_str(filename) {
            Some(Self::WheelFilename(filename))
        } else if let Ok(filename) = SourceDistFilename::parse(filename, package_name) {

@@ -25,4 +26,41 @@ impl DistFilename {
            None
        }
    }

    /// Like [`DistFilename::try_from_normalized_filename`], but without knowing the package name.
    ///
    /// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
    /// source dist filename version doesn't contain minus (the version is normalized).
    pub fn try_from_normalized_filename(filename: &str) -> Option<Self> {
        if let Ok(filename) = WheelFilename::from_str(filename) {
            Some(Self::WheelFilename(filename))
        } else if let Ok(filename) = SourceDistFilename::parsed_normalized_filename(filename) {
            Some(Self::SourceDistFilename(filename))
        } else {
            None
        }
    }

    pub fn name(&self) -> &PackageName {
        match self {
            DistFilename::SourceDistFilename(filename) => &filename.name,
            DistFilename::WheelFilename(filename) => &filename.name,
        }
    }

    pub fn version(&self) -> &Version {
        match self {
            DistFilename::SourceDistFilename(filename) => &filename.version,
            DistFilename::WheelFilename(filename) => &filename.version,
        }
    }
}

impl Display for DistFilename {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            DistFilename::SourceDistFilename(filename) => Display::fmt(filename, f),
            DistFilename::WheelFilename(filename) => Display::fmt(filename, f),
        }
    }
}

@@ -105,6 +105,43 @@ impl SourceDistFilename {
            extension,
        })
    }

    /// Like [`SourceDistFilename::parse`], but without knowing the package name.
    ///
    /// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
    /// source dist filename version doesn't contain minus (the version is normalized).
    pub fn parsed_normalized_filename(filename: &str) -> Result<Self, SourceDistFilenameError> {
        let Some((stem, extension)) = SourceDistExtension::from_filename(filename) else {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Extension,
            });
        };

        let Some((package_name, version)) = stem.rsplit_once('-') else {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Minus,
            });
        };
        let package_name =
            PackageName::from_str(package_name).map_err(|err| SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::PackageName(err),
            })?;

        // We checked the length above
        let version = Version::from_str(version).map_err(|err| SourceDistFilenameError {
            filename: filename.to_string(),
            kind: SourceDistFilenameErrorKind::Version(err),
        })?;

        Ok(Self {
            name: package_name,
            version,
            extension,
        })
    }
}

impl Display for SourceDistFilename {

@@ -139,13 +176,16 @@ enum SourceDistFilenameErrorKind {
    Version(#[from] VersionParseError),
    #[error(transparent)]
    PackageName(#[from] InvalidNameError),
    #[error("Missing name-version separator")]
    Minus,
}

#[cfg(test)]
mod tests {
    use puffin_normalize::PackageName;
    use std::str::FromStr;

    use puffin_normalize::PackageName;

    use crate::SourceDistFilename;

    /// Only test already normalized names since the parsing is lossy

@@ -14,9 +14,10 @@ workspace = true

[dependencies]
cache-key = { path = "../cache-key" }
distribution-filename = { path = "../distribution-filename" }
distribution-filename = { path = "../distribution-filename", features = ["serde"] }
pep440_rs = { path = "../pep440-rs" }
pep508_rs = { path = "../pep508-rs" }
platform-tags = { path = "../platform-tags" }
puffin-git = { path = "../puffin-git" }
puffin-normalize = { path = "../puffin-normalize" }
pypi-types = { path = "../pypi-types" }

@@ -1,9 +1,13 @@
use std::fmt::{Display, Formatter};
use std::path::PathBuf;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;

use pep440_rs::{VersionSpecifiers, VersionSpecifiersParseError};
use pep508_rs::VerbatimUrl;
use pypi_types::{BaseUrl, DistInfoMetadata, Hashes, Yanked};

/// Error converting [`pypi_types::File`] to [`distribution_type::File`].

@@ -24,7 +28,7 @@ pub struct File {
    pub requires_python: Option<VersionSpecifiers>,
    pub size: Option<u64>,
    pub upload_time: Option<DateTime<Utc>>,
    pub url: Url,
    pub url: FileLocation,
    pub yanked: Option<Yanked>,
}

@@ -38,10 +42,29 @@ impl File {
            requires_python: file.requires_python.transpose()?,
            size: file.size,
            upload_time: file.upload_time,
            url: base
                .join_relative(&file.url)
                .map_err(|err| FileConversionError::Url(file.url.clone(), err))?,
            url: FileLocation::Url(
                base.join_relative(&file.url)
                    .map_err(|err| FileConversionError::Url(file.url.clone(), err))?,
            ),
            yanked: file.yanked,
        })
    }
}

/// While a registry file is generally a remote URL, it can also be a file if it comes from a directory flat indexes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FileLocation {
    /// URL relative to base
    Url(Url),
    /// Absolute path to file
    Path(PathBuf, VerbatimUrl),
}

impl Display for FileLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            FileLocation::Url(url) => Display::fmt(url, f),
            FileLocation::Path(path, _url) => Display::fmt(&path.display(), f),
        }
    }
}

@@ -1,5 +1,5 @@
use std::iter::Chain;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;

use once_cell::sync::Lazy;

@@ -53,6 +53,44 @@ impl Deref for IndexUrl {
    }
}

/// A directory with distributions or a URL to an HTML file with a flat listing of distributions.
///
/// Also known as `--find-links`.
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum FlatIndexLocation {
    Path(PathBuf),
    Url(Url),
}

impl FromStr for FlatIndexLocation {
    type Err = FlatIndexError;

    fn from_str(location: &str) -> Result<Self, Self::Err> {
        if location.contains("://") {
            let url =
                Url::parse(location).map_err(|err| FlatIndexError::Url(location.into(), err))?;
            if url.scheme() == "file" {
                match url.to_file_path() {
                    Ok(path_buf) => Ok(Self::Path(path_buf)),
                    Err(()) => Err(FlatIndexError::FilePath(url)),
                }
            } else {
                Ok(Self::Url(url))
            }
        } else {
            Ok(Self::Path(PathBuf::from(location)))
        }
    }
}

#[derive(Debug, thiserror::Error)]
pub enum FlatIndexError {
    #[error("Invalid file location URL: {0}")]
    Url(String, #[source] url::ParseError),
    #[error("Invalid `file://` path in URL: {0}")]
    FilePath(Url),
}

/// The index URLs to use for fetching packages.
///
/// "pip treats all package sources equally" (<https://github.com/pypa/pip/issues/8606#issuecomment-788754817>),

@@ -60,34 +98,45 @@ impl Deref for IndexUrl {
///
/// If the fields are none and empty, ignore the package index, instead rely on local archives and
/// caches.
///
/// From a pip perspective, this type merges `--index-url`, `--extra-index-url`, and `--find-links`.
#[derive(Debug, Clone)]
pub struct IndexUrls {
    pub index: Option<IndexUrl>,
    pub extra_index: Vec<IndexUrl>,
pub struct IndexLocations {
    index: Option<IndexUrl>,
    extra_index: Vec<IndexUrl>,
    flat_index: Vec<FlatIndexLocation>,
}

impl Default for IndexUrls {
impl Default for IndexLocations {
    /// Just pypi
    fn default() -> Self {
        Self {
            index: Some(IndexUrl::Pypi),
            extra_index: Vec::new(),
            flat_index: Vec::new(),
        }
    }
}

impl IndexUrls {
impl IndexLocations {
    /// Determine the index URLs to use for fetching packages.
    pub fn from_args(index: IndexUrl, extra_index: Vec<IndexUrl>, no_index: bool) -> Self {
    pub fn from_args(
        index: IndexUrl,
        extra_index: Vec<IndexUrl>,
        flat_index: Vec<FlatIndexLocation>,
        no_index: bool,
    ) -> Self {
        if no_index {
            Self {
                index: None,
                extra_index: Vec::new(),
                flat_index: Vec::new(),
            }
        } else {
            Self {
                index: Some(index),
                extra_index,
                flat_index,
            }
        }
    }

@@ -97,19 +146,12 @@ impl IndexUrls {
    }
}

impl<'a> IntoIterator for &'a IndexUrls {
    type Item = &'a IndexUrl;
    type IntoIter = Chain<std::option::Iter<'a, IndexUrl>, std::slice::Iter<'a, IndexUrl>>;

    fn into_iter(self) -> Self::IntoIter {
impl<'a> IndexLocations {
    pub fn indexes(&'a self) -> impl Iterator<Item = &'a IndexUrl> + 'a {
        self.index.iter().chain(self.extra_index.iter())
    }
}

impl<'a> IndexUrls {
    pub fn iter(
        &'a self,
    ) -> Chain<std::option::Iter<'a, IndexUrl>, std::slice::Iter<'a, IndexUrl>> {
        self.into_iter()
    pub fn flat_indexes(&'a self) -> impl Iterator<Item = &'a FlatIndexLocation> + 'a {
        self.flat_index.iter()
    }
}

@ -25,8 +25,6 @@
|
|||
//! * [`CachedRegistryDist`]
|
||||
//! * [`CachedDirectUrlDist`]
|
||||
//!
|
||||
//! TODO(konstin): Track all kinds from [`Dist`].
|
||||
//!
|
||||
//! ## `InstalledDist`
|
||||
//! An [`InstalledDist`] is built distribution (wheel) that is installed in a virtual environment,
|
||||
//! with the two possible origins we currently track:
|
||||
|
|
@ -34,8 +32,6 @@
|
|||
//! * [`InstalledDirectUrlDist`]
|
||||
//!
|
||||
//! Since we read this information from [`direct_url.json`](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/), it doesn't match the information [`Dist`] exactly.
|
||||
//!
|
||||
//! TODO(konstin): Track all kinds from [`Dist`].
|
||||
use std::borrow::Cow;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
|
@ -43,7 +39,7 @@ use std::str::FromStr;
|
|||
use anyhow::Result;
|
||||
use url::Url;
|
||||
|
||||
use distribution_filename::WheelFilename;
|
||||
use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
|
||||
use pep440_rs::Version;
|
||||
use pep508_rs::VerbatimUrl;
|
||||
use puffin_normalize::PackageName;
|
||||
|
|
@ -58,6 +54,7 @@ pub use crate::file::*;
|
|||
pub use crate::id::*;
|
||||
pub use crate::index_url::*;
|
||||
pub use crate::installed::*;
|
||||
pub use crate::prioritized_distribution::*;
|
||||
pub use crate::resolution::*;
|
||||
pub use crate::traits::*;
|
||||
|
||||
|
|
@ -70,6 +67,7 @@ mod file;
|
|||
mod id;
|
||||
mod index_url;
|
||||
mod installed;
|
||||
mod prioritized_distribution;
|
||||
mod resolution;
|
||||
mod traits;
|
||||
|
||||
|
|
@ -148,8 +146,7 @@ pub enum SourceDist {
|
|||
/// A built distribution (wheel) that exists in a registry, like `PyPI`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RegistryBuiltDist {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
pub filename: WheelFilename,
|
||||
pub file: File,
|
||||
pub index: IndexUrl,
|
||||
}
|
||||
|
|
@ -174,8 +171,7 @@ pub struct PathBuiltDist {
|
|||
/// A source distribution that exists in a registry, like `PyPI`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RegistrySourceDist {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
pub filename: SourceDistFilename,
|
||||
pub file: File,
|
||||
pub index: IndexUrl,
|
||||
}
|
||||
|
|
@ -207,24 +203,22 @@ pub struct PathSourceDist {
|
|||
|
||||
impl Dist {
|
||||
/// Create a [`Dist`] for a registry-based distribution.
|
||||
pub fn from_registry(name: PackageName, version: Version, file: File, index: IndexUrl) -> Self {
|
||||
if Path::new(&file.filename)
|
||||
.extension()
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
|
||||
{
|
||||
Self::Built(BuiltDist::Registry(RegistryBuiltDist {
|
||||
name,
|
||||
version,
|
||||
file,
|
||||
index,
|
||||
}))
|
||||
} else {
|
||||
Self::Source(SourceDist::Registry(RegistrySourceDist {
|
||||
name,
|
||||
version,
|
||||
file,
|
||||
index,
|
||||
}))
|
||||
pub fn from_registry(filename: DistFilename, file: File, index: IndexUrl) -> Self {
|
||||
match filename {
|
||||
DistFilename::WheelFilename(filename) => {
|
||||
Self::Built(BuiltDist::Registry(RegistryBuiltDist {
|
||||
filename,
|
||||
file,
|
||||
index,
|
||||
}))
|
||||
}
|
||||
DistFilename::SourceDistFilename(filename) => {
|
||||
Self::Source(SourceDist::Registry(RegistrySourceDist {
|
||||
filename,
|
||||
file,
|
||||
index,
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -305,6 +299,13 @@ impl Dist {
|
|||
Dist::Source(source) => source.file(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> Option<&Version> {
|
||||
match self {
|
||||
Dist::Built(wheel) => Some(wheel.version()),
|
||||
Dist::Source(source_dist) => source_dist.version(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BuiltDist {
|
||||
|
|
@ -315,6 +316,14 @@ impl BuiltDist {
|
|||
BuiltDist::DirectUrl(_) | BuiltDist::Path(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> &Version {
|
||||
match self {
|
||||
BuiltDist::Registry(wheel) => &wheel.filename.version,
|
||||
BuiltDist::DirectUrl(wheel) => &wheel.filename.version,
|
||||
BuiltDist::Path(wheel) => &wheel.filename.version,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDist {
|
||||
|
|
@ -326,6 +335,13 @@ impl SourceDist {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> Option<&Version> {
|
||||
match self {
|
||||
SourceDist::Registry(source_dist) => Some(&source_dist.filename.version),
|
||||
SourceDist::DirectUrl(_) | SourceDist::Git(_) | SourceDist::Path(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_url(self, url: Url) -> Self {
|
||||
match self {
|
||||
|
|
@ -348,7 +364,7 @@ impl SourceDist {
|
|||
|
||||
impl Name for RegistryBuiltDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -366,7 +382,7 @@ impl Name for PathBuiltDist {
|
|||
|
||||
impl Name for RegistrySourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -420,7 +436,7 @@ impl Name for Dist {
|
|||
|
||||
impl DistributionMetadata for RegistryBuiltDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Version(&self.version)
|
||||
VersionOrUrl::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -438,7 +454,7 @@ impl DistributionMetadata for PathBuiltDist {
|
|||
|
||||
impl DistributionMetadata for RegistrySourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Version(&self.version)
|
||||
VersionOrUrl::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -678,6 +694,22 @@ impl Identifier for Path {
|
|||
}
|
||||
}
|
||||
|
||||
impl Identifier for FileLocation {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
FileLocation::Url(url) => url.distribution_id(),
|
||||
FileLocation::Path(path, _) => path.distribution_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
match self {
|
||||
FileLocation::Url(url) => url.resource_id(),
|
||||
FileLocation::Path(path, _) => path.resource_id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for RegistryBuiltDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.file.distribution_id()
|
||||
|
|
|
|||
|
|
@ -0,0 +1,207 @@
|
|||
use pep440_rs::VersionSpecifiers;
|
||||
use platform_tags::TagPriority;
|
||||
use pypi_types::Hashes;
|
||||
|
||||
use crate::Dist;
|
||||
|
||||
/// Attach its requires-python to a [`Dist`], since downstream needs this information to filter
|
||||
/// [`PrioritizedDistribution`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DistRequiresPython {
|
||||
pub dist: Dist,
|
||||
pub requires_python: Option<VersionSpecifiers>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrioritizedDistribution {
|
||||
/// An arbitrary source distribution for the package version.
|
||||
source: Option<DistRequiresPython>,
|
||||
/// The highest-priority, platform-compatible wheel for the package version.
|
||||
compatible_wheel: Option<(DistRequiresPython, TagPriority)>,
|
||||
/// An arbitrary, platform-incompatible wheel for the package version.
|
||||
incompatible_wheel: Option<DistRequiresPython>,
|
||||
/// The hashes for each distribution.
|
||||
hashes: Vec<Hashes>,
|
||||
}
|
||||
|
||||
impl PrioritizedDistribution {
|
||||
/// Create a new [`PrioritizedDistribution`] from the given wheel distribution.
|
||||
pub fn from_built(
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
hash: Option<Hashes>,
|
||||
priority: Option<TagPriority>,
|
||||
) -> Self {
|
||||
if let Some(priority) = priority {
|
||||
Self {
|
||||
source: None,
|
||||
compatible_wheel: Some((
|
||||
DistRequiresPython {
|
||||
dist,
|
||||
|
||||
requires_python,
|
||||
},
|
||||
priority,
|
||||
)),
|
||||
incompatible_wheel: None,
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
source: None,
|
||||
compatible_wheel: None,
|
||||
incompatible_wheel: Some(DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
}),
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new [`PrioritizedDistribution`] from the given source distribution.
|
||||
pub fn from_source(
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
hash: Option<Hashes>,
|
||||
) -> Self {
|
||||
Self {
|
||||
source: Some(DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
}),
|
||||
compatible_wheel: None,
|
||||
incompatible_wheel: None,
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Insert the given built distribution into the [`PrioritizedDistribution`].
|
||||
pub fn insert_built(
|
||||
&mut self,
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
hash: Option<Hashes>,
|
||||
priority: Option<TagPriority>,
|
||||
) {
|
||||
// Prefer the highest-priority, platform-compatible wheel.
|
||||
if let Some(priority) = priority {
|
||||
if let Some((.., existing_priority)) = &self.compatible_wheel {
|
||||
if priority > *existing_priority {
|
||||
self.compatible_wheel = Some((
|
||||
DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
},
|
||||
priority,
|
||||
));
|
||||
}
|
||||
} else {
|
||||
self.compatible_wheel = Some((
|
||||
DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
},
|
||||
priority,
|
||||
));
|
||||
}
|
||||
} else if self.incompatible_wheel.is_none() {
|
||||
self.incompatible_wheel = Some(DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(hash) = hash {
|
||||
self.hashes.push(hash);
|
||||
}
|
||||
}
|
||||
|
||||
/// Insert the given source distribution into the [`PrioritizedDistribution`].
|
||||
pub fn insert_source(
|
||||
&mut self,
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
hash: Option<Hashes>,
|
||||
) {
|
||||
if self.source.is_none() {
|
||||
self.source = Some(DistRequiresPython {
|
||||
dist,
|
||||
requires_python,
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(hash) = hash {
|
||||
self.hashes.push(hash);
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the highest-priority distribution for the package version, if any.
|
||||
pub fn get(&self) -> Option<ResolvableDist> {
|
||||
match (
|
||||
&self.compatible_wheel,
|
||||
&self.source,
|
||||
&self.incompatible_wheel,
|
||||
) {
|
||||
// Prefer the highest-priority, platform-compatible wheel.
|
||||
(Some((wheel, tag_priority)), _, _) => {
|
||||
Some(ResolvableDist::CompatibleWheel(wheel, *tag_priority))
|
||||
}
|
||||
// If we have a compatible source distribution and an incompatible wheel, return the
|
||||
// wheel. We assume that all distributions have the same metadata for a given package
|
||||
// version. If a compatible source distribution exists, we assume we can build it, but
|
||||
// using the wheel is faster.
|
||||
(_, Some(source_dist), Some(wheel)) => {
|
||||
Some(ResolvableDist::IncompatibleWheel { source_dist, wheel })
|
||||
}
|
||||
// Otherwise, if we have a source distribution, return it.
|
||||
(_, Some(source_dist), _) => Some(ResolvableDist::SourceDist(source_dist)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the hashes for each distribution.
|
||||
pub fn hashes(&self) -> &[Hashes] {
|
||||
&self.hashes
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ResolvableDist<'a> {
|
||||
/// The distribution should be resolved and installed using a source distribution.
|
||||
SourceDist(&'a DistRequiresPython),
|
||||
/// The distribution should be resolved and installed using a wheel distribution.
|
||||
CompatibleWheel(&'a DistRequiresPython, TagPriority),
|
||||
/// The distribution should be resolved using an incompatible wheel distribution, but
|
||||
/// installed using a source distribution.
|
||||
IncompatibleWheel {
|
||||
source_dist: &'a DistRequiresPython,
|
||||
wheel: &'a DistRequiresPython,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> ResolvableDist<'a> {
|
||||
/// Return the [`DistRequiresPython`] to use during resolution.
|
||||
pub fn resolve(&self) -> &DistRequiresPython {
|
||||
match *self {
|
||||
ResolvableDist::SourceDist(sdist) => sdist,
|
||||
ResolvableDist::CompatibleWheel(wheel, _) => wheel,
|
||||
ResolvableDist::IncompatibleWheel {
|
||||
source_dist: _,
|
||||
wheel,
|
||||
} => wheel,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`DistRequiresPython`] to use during installation.
|
||||
pub fn install(&self) -> &DistRequiresPython {
|
||||
match *self {
|
||||
ResolvableDist::SourceDist(sdist) => sdist,
|
||||
ResolvableDist::CompatibleWheel(wheel, _) => wheel,
|
||||
ResolvableDist::IncompatibleWheel {
|
||||
source_dist,
|
||||
wheel: _,
|
||||
} => source_dist,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -65,11 +65,11 @@ impl From<Dist> for Requirement {
    fn from(dist: Dist) -> Self {
        match dist {
            Dist::Built(BuiltDist::Registry(wheel)) => Requirement {
                name: wheel.name,
                name: wheel.filename.name,
                extras: None,
                version_or_url: Some(pep508_rs::VersionOrUrl::VersionSpecifier(
                    pep440_rs::VersionSpecifiers::from(
                        pep440_rs::VersionSpecifier::equals_version(wheel.version),
                        pep440_rs::VersionSpecifier::equals_version(wheel.filename.version),
                    ),
                )),
                marker: None,

@@ -87,11 +87,11 @@ impl From<Dist> for Requirement {
                marker: None,
            },
            Dist::Source(SourceDist::Registry(sdist)) => Requirement {
                name: sdist.name,
                name: sdist.filename.name,
                extras: None,
                version_or_url: Some(pep508_rs::VersionOrUrl::VersionSpecifier(
                    pep440_rs::VersionSpecifiers::from(
                        pep440_rs::VersionSpecifier::equals_version(sdist.version),
                        pep440_rs::VersionSpecifier::equals_version(sdist.filename.version),
                    ),
                )),
                marker: None,

@@ -4,11 +4,13 @@ use std::path::Path;

use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use url::Url;

/// A wrapper around [`Url`] that preserves the original string.
#[derive(Debug, Clone, Eq, derivative::Derivative)]
#[derivative(PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct VerbatimUrl {
    /// The parsed URL.
    url: Url,

@ -13,7 +13,7 @@ use owo_colors::OwoColorize;
|
|||
use tempfile::tempdir_in;
|
||||
use tracing::debug;
|
||||
|
||||
use distribution_types::{IndexUrls, LocalEditable};
|
||||
use distribution_types::{IndexLocations, LocalEditable};
|
||||
use pep508_rs::Requirement;
|
||||
use platform_host::Platform;
|
||||
use platform_tags::Tags;
|
||||
|
|
@ -48,7 +48,7 @@ pub(crate) async fn pip_compile(
|
|||
prerelease_mode: PreReleaseMode,
|
||||
upgrade_mode: UpgradeMode,
|
||||
generate_hashes: bool,
|
||||
index_urls: IndexUrls,
|
||||
index_locations: IndexLocations,
|
||||
setup_py: SetupPyStrategy,
|
||||
no_build: bool,
|
||||
python_version: Option<PythonVersion>,
|
||||
|
|
@ -144,7 +144,7 @@ pub(crate) async fn pip_compile(
|
|||
|
||||
// Instantiate a client.
|
||||
let client = RegistryClientBuilder::new(cache.clone())
|
||||
.index_urls(index_urls.clone())
|
||||
.index_locations(index_locations.clone())
|
||||
.build();
|
||||
|
||||
let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer);
|
||||
|
|
@ -152,7 +152,7 @@ pub(crate) async fn pip_compile(
|
|||
&client,
|
||||
&cache,
|
||||
&interpreter,
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
interpreter.sys_executable().to_path_buf(),
|
||||
setup_py,
|
||||
no_build,
|
||||
|
|
@ -228,7 +228,7 @@ pub(crate) async fn pip_compile(
|
|||
&tags,
|
||||
&client,
|
||||
&build_dispatch,
|
||||
)
|
||||
)?
|
||||
.with_reporter(ResolverReporter::from(printer));
|
||||
let resolution = match resolver.resolve().await {
|
||||
Err(puffin_resolver::ResolveError::NoSolution(err)) => {
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ use tempfile::tempdir_in;
|
|||
use tracing::debug;
|
||||
|
||||
use distribution_types::{
|
||||
IndexUrls, InstalledMetadata, LocalDist, LocalEditable, Name, Resolution,
|
||||
IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name, Resolution,
|
||||
};
|
||||
use install_wheel_rs::linker::LinkMode;
|
||||
use pep508_rs::{MarkerEnvironment, Requirement};
|
||||
|
|
@ -45,7 +45,7 @@ pub(crate) async fn pip_install(
|
|||
extras: &ExtrasSpecification<'_>,
|
||||
resolution_mode: ResolutionMode,
|
||||
prerelease_mode: PreReleaseMode,
|
||||
index_urls: IndexUrls,
|
||||
index_locations: IndexLocations,
|
||||
reinstall: &Reinstall,
|
||||
link_mode: LinkMode,
|
||||
setup_py: SetupPyStrategy,
|
||||
|
|
@ -134,7 +134,7 @@ pub(crate) async fn pip_install(
|
|||
|
||||
// Instantiate a client.
|
||||
let client = RegistryClientBuilder::new(cache.clone())
|
||||
.index_urls(index_urls.clone())
|
||||
.index_locations(index_locations.clone())
|
||||
.build();
|
||||
|
||||
let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer);
|
||||
|
|
@ -143,7 +143,7 @@ pub(crate) async fn pip_install(
|
|||
&client,
|
||||
&cache,
|
||||
&interpreter,
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
venv.python_executable(),
|
||||
setup_py,
|
||||
no_build,
|
||||
|
|
@ -209,7 +209,7 @@ pub(crate) async fn pip_install(
|
|||
site_packages,
|
||||
reinstall,
|
||||
link_mode,
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
tags,
|
||||
&client,
|
||||
&build_dispatch,
|
||||
|
|
@ -379,7 +379,7 @@ async fn resolve(
|
|||
tags,
|
||||
client,
|
||||
build_dispatch,
|
||||
)
|
||||
)?
|
||||
.with_reporter(ResolverReporter::from(printer));
|
||||
let resolution = resolver.resolve().await?;
|
||||
|
||||
|
|
@ -406,7 +406,7 @@ async fn install(
|
|||
site_packages: SitePackages<'_>,
|
||||
reinstall: &Reinstall,
|
||||
link_mode: LinkMode,
|
||||
index_urls: &IndexUrls,
|
||||
index_urls: &IndexLocations,
|
||||
tags: &Tags,
|
||||
client: &RegistryClient,
|
||||
build_dispatch: &BuildDispatch<'_>,
|
||||
|
|
@ -603,6 +603,9 @@ enum Error {
|
|||
#[error(transparent)]
|
||||
Resolve(#[from] puffin_resolver::ResolveError),
|
||||
|
||||
#[error(transparent)]
|
||||
Client(#[from] puffin_client::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
Platform(#[from] platform_host::PlatformError),
|
||||
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ use itertools::Itertools;
|
|||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
|
||||
use distribution_types::{IndexUrls, InstalledMetadata, LocalDist, LocalEditable, Name};
|
||||
use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name};
|
||||
use install_wheel_rs::linker::LinkMode;
|
||||
use platform_host::Platform;
|
||||
use platform_tags::Tags;
|
||||
|
|
@ -29,7 +29,7 @@ pub(crate) async fn pip_sync(
|
|||
sources: &[RequirementsSource],
|
||||
reinstall: &Reinstall,
|
||||
link_mode: LinkMode,
|
||||
index_urls: IndexUrls,
|
||||
index_locations: IndexLocations,
|
||||
setup_py: SetupPyStrategy,
|
||||
no_build: bool,
|
||||
strict: bool,
|
||||
|
|
@ -60,7 +60,7 @@ pub(crate) async fn pip_sync(
|
|||
|
||||
// Prep the registry client.
|
||||
let client = RegistryClientBuilder::new(cache.clone())
|
||||
.index_urls(index_urls.clone())
|
||||
.index_locations(index_locations.clone())
|
||||
.build();
|
||||
|
||||
// Prep the build context.
|
||||
|
|
@ -68,7 +68,7 @@ pub(crate) async fn pip_sync(
|
|||
&client,
|
||||
&cache,
|
||||
venv.interpreter(),
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
venv.python_executable(),
|
||||
setup_py,
|
||||
no_build,
|
||||
|
|
@ -104,7 +104,7 @@ pub(crate) async fn pip_sync(
|
|||
resolved_editables.editables,
|
||||
site_packages,
|
||||
reinstall,
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
&cache,
|
||||
&venv,
|
||||
tags,
|
||||
|
|
@ -130,7 +130,7 @@ pub(crate) async fn pip_sync(
|
|||
|
||||
// Instantiate a client.
|
||||
let client = RegistryClientBuilder::new(cache.clone())
|
||||
.index_urls(index_urls.clone())
|
||||
.index_locations(index_locations.clone())
|
||||
.build();
|
||||
|
||||
// Resolve any registry-based requirements.
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ use miette::{Diagnostic, IntoDiagnostic};
|
|||
use owo_colors::OwoColorize;
|
||||
use thiserror::Error;
|
||||
|
||||
use distribution_types::{DistributionMetadata, IndexUrls, Name};
|
||||
use distribution_types::{DistributionMetadata, IndexLocations, Name};
|
||||
use pep508_rs::Requirement;
|
||||
use platform_host::Platform;
|
||||
use puffin_cache::Cache;
|
||||
|
|
@ -25,12 +25,12 @@ use crate::printer::Printer;
|
|||
pub(crate) async fn venv(
|
||||
path: &Path,
|
||||
base_python: Option<&Path>,
|
||||
index_urls: &IndexUrls,
|
||||
index_locations: &IndexLocations,
|
||||
seed: bool,
|
||||
cache: &Cache,
|
||||
printer: Printer,
|
||||
) -> Result<ExitStatus> {
|
||||
match venv_impl(path, base_python, index_urls, seed, cache, printer).await {
|
||||
match venv_impl(path, base_python, index_locations, seed, cache, printer).await {
|
||||
Ok(status) => Ok(status),
|
||||
Err(err) => {
|
||||
#[allow(clippy::print_stderr)]
|
||||
|
|
@ -69,7 +69,7 @@ enum VenvError {
|
|||
async fn venv_impl(
|
||||
path: &Path,
|
||||
base_python: Option<&Path>,
|
||||
index_urls: &IndexUrls,
|
||||
index_locations: &IndexLocations,
|
||||
seed: bool,
|
||||
cache: &Cache,
|
||||
mut printer: Printer,
|
||||
|
|
@ -122,7 +122,7 @@ async fn venv_impl(
|
|||
&client,
|
||||
cache,
|
||||
venv.interpreter(),
|
||||
index_urls,
|
||||
index_locations,
|
||||
venv.python_executable(),
|
||||
SetupPyStrategy::default(),
|
||||
true,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ use chrono::{DateTime, Days, NaiveDate, NaiveTime, Utc};
|
|||
use clap::{Args, Parser, Subcommand};
|
||||
use owo_colors::OwoColorize;
|
||||
|
||||
use distribution_types::{IndexUrl, IndexUrls};
|
||||
use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl};
|
||||
use puffin_cache::{Cache, CacheArgs};
|
||||
use puffin_installer::Reinstall;
|
||||
use puffin_interpreter::PythonVersion;
|
||||
|
|
@ -159,6 +159,15 @@ struct PipCompileArgs {
|
|||
#[clap(long)]
|
||||
extra_index_url: Vec<IndexUrl>,
|
||||
|
||||
/// Locations to search for candidate distributions, beyond those found in the indexes.
|
||||
///
|
||||
/// If a path, the target must be a directory that contains package as wheel files (`.whl`) or
|
||||
/// source distributions (`.tar.gz` or `.zip`) at the top level.
|
||||
///
|
||||
/// If a URL, the page must contain a flat list of links to package files.
|
||||
#[clap(long)]
|
||||
find_links: Vec<FlatIndexLocation>,
|
||||
|
||||
/// Ignore the package index, instead relying on local archives and caches.
|
||||
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
|
||||
no_index: bool,
|
||||
|
|
@ -234,6 +243,15 @@ struct PipSyncArgs {
|
|||
#[clap(long)]
|
||||
extra_index_url: Vec<IndexUrl>,
|
||||
|
||||
/// Locations to search for candidate distributions, beyond those found in the indexes.
|
||||
///
|
||||
/// If a path, the target must be a directory that contains package as wheel files (`.whl`) or
|
||||
/// source distributions (`.tar.gz` or `.zip`) at the top level.
|
||||
///
|
||||
/// If a URL, the page must contain a flat list of links to package files.
|
||||
#[clap(long)]
|
||||
find_links: Vec<FlatIndexLocation>,
|
||||
|
||||
/// Ignore the package index, instead relying on local archives and caches.
|
||||
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
|
||||
no_index: bool,
|
||||
|
|
@ -335,6 +353,15 @@ struct PipInstallArgs {
|
|||
#[clap(long)]
|
||||
extra_index_url: Vec<IndexUrl>,
|
||||
|
||||
/// Locations to search for candidate distributions, beyond those found in the indexes.
|
||||
///
|
||||
/// If a path, the target must be a directory that contains package as wheel files (`.whl`) or
|
||||
/// source distributions (`.tar.gz` or `.zip`) at the top level.
|
||||
///
|
||||
/// If a URL, the page must contain a flat list of links to package files.
|
||||
#[clap(long)]
|
||||
find_links: Vec<FlatIndexLocation>,
|
||||
|
||||
/// Ignore the package index, instead relying on local archives and caches.
|
||||
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
|
||||
no_index: bool,
|
||||
|
|
@ -497,8 +524,12 @@ async fn inner() -> Result<ExitStatus> {
|
|||
.into_iter()
|
||||
.map(RequirementsSource::from)
|
||||
.collect::<Vec<_>>();
|
||||
let index_urls =
|
||||
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
|
||||
let index_urls = IndexLocations::from_args(
|
||||
args.index_url,
|
||||
args.extra_index_url,
|
||||
args.find_links,
|
||||
args.no_index,
|
||||
);
|
||||
let extras = if args.all_extras {
|
||||
ExtrasSpecification::All
|
||||
} else if args.extra.is_empty() {
|
||||
|
|
@ -531,8 +562,12 @@ async fn inner() -> Result<ExitStatus> {
|
|||
.await
|
||||
}
|
||||
Commands::PipSync(args) => {
|
||||
let index_urls =
|
||||
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
|
||||
let index_urls = IndexLocations::from_args(
|
||||
args.index_url,
|
||||
args.extra_index_url,
|
||||
args.find_links,
|
||||
args.no_index,
|
||||
);
|
||||
let sources = args
|
||||
.src_file
|
||||
.into_iter()
|
||||
|
|
@ -574,8 +609,12 @@ async fn inner() -> Result<ExitStatus> {
|
|||
.into_iter()
|
||||
.map(RequirementsSource::from)
|
||||
.collect::<Vec<_>>();
|
||||
let index_urls =
|
||||
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
|
||||
let index_urls = IndexLocations::from_args(
|
||||
args.index_url,
|
||||
args.extra_index_url,
|
||||
args.find_links,
|
||||
args.no_index,
|
||||
);
|
||||
let extras = if args.all_extras {
|
||||
ExtrasSpecification::All
|
||||
} else if args.extra.is_empty() {
|
||||
|
|
@ -620,12 +659,17 @@ async fn inner() -> Result<ExitStatus> {
|
|||
Commands::Clean(args) => commands::clean(&cache, &args.package, printer),
|
||||
Commands::PipFreeze(args) => commands::freeze(&cache, args.strict, printer),
|
||||
Commands::Venv(args) => {
|
||||
let index_urls =
|
||||
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
|
||||
let index_locations = IndexLocations::from_args(
|
||||
args.index_url,
|
||||
args.extra_index_url,
|
||||
// No find links for the venv subcommand, to keep things simple
|
||||
Vec::new(),
|
||||
args.no_index,
|
||||
);
|
||||
commands::venv(
|
||||
&args.name,
|
||||
args.python.as_deref(),
|
||||
&index_urls,
|
||||
&index_locations,
|
||||
args.seed,
|
||||
&cache,
|
||||
printer,
|
||||
|
|
|
|||
|
|
@@ -3035,3 +3035,56 @@ fn generate_hashes() -> Result<()> {

    Ok(())
}

/// Make sure find links are correctly resolved and reported
#[test]
fn find_links() -> Result<()> {
    let temp_dir = TempDir::new()?;
    let cache_dir = TempDir::new()?;
    let venv = create_venv_py312(&temp_dir, &cache_dir);

    let requirements_in = temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        tqdm
        numpy
        werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
    "})?;

    let project_root = fs_err::canonicalize(std::env::current_dir()?.join("../.."))?;
    let project_root_string = project_root.display().to_string();
    let filters: Vec<_> = iter::once((project_root_string.as_str(), "[PROJECT_ROOT]"))
        .chain(INSTA_FILTERS.to_vec())
        .collect();

    insta::with_settings!({
        filters => filters
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .arg("pip-compile")
            .arg("requirements.in")
            .arg("--find-links")
            .arg(project_root.join("scripts/wheels/"))
            .arg("--cache-dir")
            .arg(cache_dir.path())
            .arg("--exclude-newer")
            .arg(EXCLUDE_NEWER)
            .env("VIRTUAL_ENV", venv.as_os_str())
            .current_dir(&temp_dir), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        # This file was autogenerated by Puffin v0.0.1 via the following command:
        # puffin pip-compile requirements.in --find-links [PROJECT_ROOT]/scripts/wheels/ --cache-dir [CACHE_DIR]
        markupsafe==2.1.3
            # via werkzeug
        numpy==1.26.2
        tqdm==1000.0.0
        werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl

        ----- stderr -----
        Resolved 4 packages in [TIME]
        "###);
    });

    Ok(())
}

@ -9,6 +9,8 @@ distribution-filename = { path = "../distribution-filename", features = ["serde"
|
|||
distribution-types = { path = "../distribution-types" }
|
||||
install-wheel-rs = { path = "../install-wheel-rs" }
|
||||
pep440_rs = { path = "../pep440-rs" }
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
puffin-cache = { path = "../puffin-cache" }
|
||||
puffin-fs = { path = "../puffin-fs" }
|
||||
puffin-normalize = { path = "../puffin-normalize" }
|
||||
|
|
@ -16,6 +18,7 @@ pypi-types = { path = "../pypi-types" }
|
|||
|
||||
async_http_range_reader = { workspace = true }
|
||||
async_zip = { workspace = true, features = ["tokio"] }
|
||||
chrono = { workspace = true }
|
||||
fs-err = { workspace = true, features = ["tokio"] }
|
||||
futures = { workspace = true }
|
||||
html-escape = { workspace = true }
|
||||
|
|
@ -25,6 +28,7 @@ reqwest = { workspace = true }
|
|||
reqwest-middleware = { workspace = true }
|
||||
reqwest-retry = { workspace = true }
|
||||
rmp-serde = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
|
|
@ -37,8 +41,6 @@ tracing = { workspace = true }
|
|||
url = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
insta = { version = "1.34.0" }
|
||||
tokio = { workspace = true, features = ["fs", "macros"] }
|
||||
|
|
|
|||
|
|
@@ -31,7 +31,7 @@ pub enum Error {

    /// The metadata file could not be parsed.
    #[error("Couldn't parse metadata of {0} from {1}")]
    MetadataParseError(WheelFilename, String, #[source] pypi_types::Error),
    MetadataParseError(WheelFilename, String, #[source] Box<pypi_types::Error>),

    /// The metadata file was not found in the registry.
    #[error("File `{0}` was not found in the registry at {1}.")]

@@ -95,6 +95,9 @@ pub enum Error {

    #[error("Unsupported `Content-Type` \"{1}\" for {0}. Expected JSON or HTML.")]
    UnsupportedMediaType(Url, String),

    #[error("Failed to read find links directory")]
    FindLinks(#[source] io::Error),
}

impl Error {

@@ -0,0 +1,104 @@
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::path::PathBuf;

use rustc_hash::FxHashMap;
use tracing::instrument;

use distribution_filename::DistFilename;
use distribution_types::{
    BuiltDist, Dist, File, FileLocation, IndexUrl, PrioritizedDistribution, RegistryBuiltDist,
    RegistrySourceDist, SourceDist,
};
use pep440_rs::Version;
use pep508_rs::VerbatimUrl;
use platform_tags::Tags;
use puffin_normalize::PackageName;
use pypi_types::Hashes;

#[derive(Debug, Clone)]
pub struct FlatIndex<T: Into<Version> + From<Version> + Ord>(
    pub BTreeMap<T, PrioritizedDistribution>,
);

impl<T: Into<Version> + From<Version> + Ord> Default for FlatIndex<T> {
    fn default() -> Self {
        Self(BTreeMap::default())
    }
}

impl<T: Into<Version> + From<Version> + Ord> FlatIndex<T> {
    /// Collect all the files from `--find-links` into a override hashmap we can pass into version map creation.
    #[instrument(skip_all)]
    pub fn from_dists(
        dists: Vec<(DistFilename, PathBuf)>,
        tags: &Tags,
    ) -> FxHashMap<PackageName, Self> {
        // If we have packages of the same name from find links, gives them priority, otherwise start empty
        let mut flat_index: FxHashMap<PackageName, Self> = FxHashMap::default();

        // Collect compatible distributions.
        for (filename, path) in dists {
            let version_map = flat_index.entry(filename.name().clone()).or_default();

            let url = VerbatimUrl::from_path(&path, path.display().to_string())
                .expect("Find link paths must be absolute");
            let file = File {
                dist_info_metadata: None,
                filename: filename.to_string(),
                hashes: Hashes { sha256: None },
                requires_python: None,
                size: None,
                upload_time: None,
                url: FileLocation::Path(path.to_path_buf(), url),
                yanked: None,
            };

            // No `requires-python` here: for source distributions, we don't have that information;
            // for wheels, we read it lazily only when selected.
            match filename {
                DistFilename::WheelFilename(filename) => {
                    let priority = filename.compatibility(tags);
                    let version = filename.version.clone();

                    let dist = Dist::Built(BuiltDist::Registry(RegistryBuiltDist {
                        filename,
                        file,
                        index: IndexUrl::Pypi,
                    }));
                    match version_map.0.entry(version.into()) {
                        Entry::Occupied(mut entry) => {
                            entry.get_mut().insert_built(dist, None, None, priority);
                        }
                        Entry::Vacant(entry) => {
                            entry.insert(PrioritizedDistribution::from_built(
                                dist, None, None, priority,
                            ));
                        }
                    }
                }
                DistFilename::SourceDistFilename(filename) => {
                    let dist = Dist::Source(SourceDist::Registry(RegistrySourceDist {
                        filename: filename.clone(),
                        file,
                        index: IndexUrl::Pypi,
                    }));
                    match version_map.0.entry(filename.version.clone().into()) {
                        Entry::Occupied(mut entry) => {
                            entry.get_mut().insert_source(dist, None, None);
                        }
                        Entry::Vacant(entry) => {
                            entry.insert(PrioritizedDistribution::from_source(dist, None, None));
                        }
                    }
                }
            }
        }

        flat_index
    }

    pub fn iter(&self) -> impl Iterator<Item = (&T, &PrioritizedDistribution)> {
        self.0.iter()
    }
}

@@ -1,11 +1,13 @@
pub use cached_client::{CachedClient, CachedClientError, DataWithCachePolicy};
pub use error::Error;
pub use flat_index::FlatIndex;
pub use registry_client::{
    read_metadata_async, RegistryClient, RegistryClientBuilder, SimpleMetadata, VersionFiles,
};

mod cached_client;
mod error;
mod flat_index;
mod html;
mod registry_client;
mod remote_metadata;

@ -1,6 +1,7 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::fmt::Debug;
|
||||
use std::path::Path;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use async_http_range_reader::{AsyncHttpRangeReader, AsyncHttpRangeReaderError};
|
||||
|
|
@ -17,7 +18,9 @@ use tracing::{debug, info_span, instrument, trace, warn, Instrument};
|
|||
use url::Url;
|
||||
|
||||
use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
|
||||
use distribution_types::{BuiltDist, File, IndexUrl, IndexUrls, Name};
|
||||
use distribution_types::{
|
||||
BuiltDist, File, FileLocation, FlatIndexLocation, IndexLocations, IndexUrl, Name,
|
||||
};
|
||||
use install_wheel_rs::find_dist_info;
|
||||
use pep440_rs::Version;
|
||||
use puffin_cache::{Cache, CacheBucket, WheelCache};
|
||||
|
|
@ -31,7 +34,7 @@ use crate::{CachedClient, CachedClientError, Error};
|
|||
/// A builder for an [`RegistryClient`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RegistryClientBuilder {
|
||||
index_urls: IndexUrls,
|
||||
index_locations: IndexLocations,
|
||||
retries: u32,
|
||||
cache: Cache,
|
||||
}
|
||||
|
|
@ -39,7 +42,7 @@ pub struct RegistryClientBuilder {
|
|||
impl RegistryClientBuilder {
|
||||
pub fn new(cache: Cache) -> Self {
|
||||
Self {
|
||||
index_urls: IndexUrls::default(),
|
||||
index_locations: IndexLocations::default(),
|
||||
cache,
|
||||
retries: 3,
|
||||
}
|
||||
|
|
@ -48,8 +51,8 @@ impl RegistryClientBuilder {
|
|||
|
||||
impl RegistryClientBuilder {
|
||||
#[must_use]
|
||||
pub fn index_urls(mut self, index_urls: IndexUrls) -> Self {
|
||||
self.index_urls = index_urls;
|
||||
pub fn index_locations(mut self, index_urls: IndexLocations) -> Self {
|
||||
self.index_locations = index_urls;
|
||||
self
|
||||
}
|
||||
|
||||
|
|
@ -84,7 +87,7 @@ impl RegistryClientBuilder {
|
|||
|
||||
let client = CachedClient::new(uncached_client.clone());
|
||||
RegistryClient {
|
||||
index_urls: self.index_urls,
|
||||
index_locations: self.index_locations,
|
||||
client_raw: client_raw.clone(),
|
||||
cache: self.cache,
|
||||
client,
|
||||
|
|
@ -96,7 +99,7 @@ impl RegistryClientBuilder {
|
|||
#[derive(Debug, Clone)]
|
||||
pub struct RegistryClient {
|
||||
/// The index URLs to use for fetching packages.
|
||||
index_urls: IndexUrls,
|
||||
index_locations: IndexLocations,
|
||||
/// The underlying HTTP client.
|
||||
client: CachedClient,
|
||||
/// Don't use this client, it only exists because `async_http_range_reader` needs
|
||||
|
|
@ -112,6 +115,68 @@ impl RegistryClient {
|
|||
&self.client
|
||||
}
|
||||
|
||||
/// Read the directories and flat remote indexes from `--find-links`.
|
||||
#[allow(clippy::result_large_err)]
|
||||
pub fn flat_index(&self) -> Result<Vec<(DistFilename, PathBuf)>, Error> {
|
||||
let mut dists = Vec::new();
|
||||
for flat_index in self.index_locations.flat_indexes() {
|
||||
match flat_index {
|
||||
FlatIndexLocation::Path(path) => {
|
||||
dists.extend(Self::read_flat_index_dir(path).map_err(Error::FindLinks)?);
|
||||
}
|
||||
FlatIndexLocation::Url(_) => {
|
||||
warn!("TODO(konstin): No yet implemented: Find links urls");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(dists)
|
||||
}
|
||||
|
||||
/// Read a list of [`DistFilename`] entries from a `--find-links` directory..
|
||||
fn read_flat_index_dir(path: &PathBuf) -> Result<Vec<(DistFilename, PathBuf)>, io::Error> {
|
||||
// Absolute paths are required for the URL conversion.
|
||||
let path = fs_err::canonicalize(path)?;
|
||||
|
||||
let mut dists = Vec::new();
|
||||
for entry in fs_err::read_dir(&path)? {
|
||||
let entry = entry?;
|
||||
let metadata = entry.metadata()?;
|
||||
if !metadata.is_file() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Ok(filename) = entry.file_name().into_string() else {
|
||||
warn!(
|
||||
"Skipping non-UTF-8 filename in `--find-links` directory: {}",
|
||||
entry.file_name().to_string_lossy()
|
||||
);
|
||||
continue;
|
||||
};
|
||||
let Some(filename) = DistFilename::try_from_normalized_filename(&filename) else {
|
||||
debug!(
|
||||
"Ignoring `--find-links` entry (expected a wheel or source distribution filename): {}",
|
||||
entry.path().display()
|
||||
);
|
||||
continue;
|
||||
};
|
||||
let path = entry.path().to_path_buf();
|
||||
dists.push((filename, path));
|
||||
}
|
||||
if dists.is_empty() {
|
||||
warn!(
|
||||
"No packages found in `--find-links` directory: {}",
|
||||
path.display()
|
||||
);
|
||||
} else {
|
||||
debug!(
|
||||
"Found {} packages in `--find-links` directory: {}",
|
||||
dists.len(),
|
||||
path.display()
|
||||
);
|
||||
}
|
||||
Ok(dists)
|
||||
}
|
||||
|
||||
/// Fetch a package from the `PyPI` simple API.
|
||||
///
|
||||
/// "simple" here refers to [PEP 503 – Simple Repository API](https://peps.python.org/pep-0503/)
|
||||
|
|
@@ -122,77 +187,13 @@ impl RegistryClient {
&self,
package_name: &PackageName,
) -> Result<(IndexUrl, SimpleMetadata), Error> {
if self.index_urls.no_index() {
if self.index_locations.no_index() {
return Err(Error::NoIndex(package_name.as_ref().to_string()));
}

for index in &self.index_urls {
// Format the URL for PyPI.
let mut url: Url = index.clone().into();
url.path_segments_mut()
.unwrap()
.pop_if_empty()
.push(package_name.as_ref());
for index in self.index_locations.indexes() {
let result = self.simple_single_index(package_name, index).await?;

trace!("Fetching metadata for {package_name} from {url}");

let cache_entry = self.cache.entry(
CacheBucket::Simple,
Path::new(&match index {
IndexUrl::Pypi => "pypi".to_string(),
IndexUrl::Url(url) => cache_key::digest(&cache_key::CanonicalUrl::new(url)),
}),
format!("{package_name}.msgpack"),
);

let simple_request = self
.client
.uncached()
.get(url.clone())
.header("Accept-Encoding", "gzip")
.header("Accept", MediaType::accepts())
.build()?;
let parse_simple_response = |response: Response| {
async {
let content_type = response
.headers()
.get("content-type")
.ok_or_else(|| Error::MissingContentType(url.clone()))?;
let content_type = content_type
.to_str()
.map_err(|err| Error::InvalidContentTypeHeader(url.clone(), err))?;
let media_type = content_type.split(';').next().unwrap_or(content_type);
let media_type = MediaType::from_str(media_type).ok_or_else(|| {
Error::UnsupportedMediaType(url.clone(), media_type.to_string())
})?;

match media_type {
MediaType::Json => {
let bytes = response.bytes().await?;
let data: SimpleJson = serde_json::from_slice(bytes.as_ref())
.map_err(|err| Error::from_json_err(err, url.clone()))?;
let base = BaseUrl::from(url.clone());
let metadata =
SimpleMetadata::from_files(data.files, package_name, &base);
Ok(metadata)
}
MediaType::Html => {
let text = response.text().await?;
let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
.map_err(|err| Error::from_html_err(err, url.clone()))?;
let metadata = SimpleMetadata::from_files(files, package_name, &base);
Ok(metadata)
}
}
}
.instrument(info_span!("parse_simple_api", package = %package_name))
};
let result = self
.client
.get_cached_with_callback(simple_request, &cache_entry, parse_simple_response)
.await;

// Fetch from the index.
return match result {
Ok(metadata) => Ok((index.clone(), metadata)),
Err(CachedClientError::Client(Error::RequestError(err))) => {
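
The restructuring above moves the per-index request into `simple_single_index` and turns `simple` into a loop over the configured indexes that returns the first index which knows the package. As a rough, self-contained model of that ordering rule (generic names, nothing from the crate):

/// Query each index in order and return the first successful response,
/// falling through to the next index when a package is not found there.
fn first_hit<T>(
    indexes: &[String],
    fetch: impl Fn(&str) -> Result<Option<T>, String>,
) -> Result<Option<(String, T)>, String> {
    for index in indexes {
        match fetch(index)? {
            // A hit on an earlier index shadows later indexes entirely.
            Some(metadata) => return Ok(Some((index.clone(), metadata))),
            // "Not found" on this index: try the next one.
            None => continue,
        }
    }
    Ok(None)
}
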
@@ -208,6 +209,77 @@ impl RegistryClient {
Err(Error::PackageNotFound(package_name.to_string()))
}

async fn simple_single_index(
&self,
package_name: &PackageName,
index: &IndexUrl,
) -> Result<Result<SimpleMetadata, CachedClientError<Error>>, Error> {
// Format the URL for PyPI.
let mut url: Url = index.clone().into();
url.path_segments_mut()
.unwrap()
.pop_if_empty()
.push(package_name.as_ref());

trace!("Fetching metadata for {package_name} from {url}");

let cache_entry = self.cache.entry(
CacheBucket::Simple,
Path::new(&match index {
IndexUrl::Pypi => "pypi".to_string(),
IndexUrl::Url(url) => cache_key::digest(&cache_key::CanonicalUrl::new(url)),
}),
format!("{package_name}.msgpack"),
);

let simple_request = self
.client
.uncached()
.get(url.clone())
.header("Accept-Encoding", "gzip")
.header("Accept", MediaType::accepts())
.build()?;
let parse_simple_response = |response: Response| {
async {
let content_type = response
.headers()
.get("content-type")
.ok_or_else(|| Error::MissingContentType(url.clone()))?;
let content_type = content_type
.to_str()
.map_err(|err| Error::InvalidContentTypeHeader(url.clone(), err))?;
let media_type = content_type.split(';').next().unwrap_or(content_type);
let media_type = MediaType::from_str(media_type).ok_or_else(|| {
Error::UnsupportedMediaType(url.clone(), media_type.to_string())
})?;

match media_type {
MediaType::Json => {
let bytes = response.bytes().await?;
let data: SimpleJson = serde_json::from_slice(bytes.as_ref())
.map_err(|err| Error::from_json_err(err, url.clone()))?;
let base = BaseUrl::from(url.clone());
let metadata = SimpleMetadata::from_files(data.files, package_name, &base);
Ok(metadata)
}
MediaType::Html => {
let text = response.text().await?;
let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
.map_err(|err| Error::from_html_err(err, url.clone()))?;
let metadata = SimpleMetadata::from_files(files, package_name, &base);
Ok(metadata)
}
}
}
.instrument(info_span!("parse_simple_api", package = %package_name))
};
let result = self
.client
.get_cached_with_callback(simple_request, &cache_entry, parse_simple_response)
.await;
Ok(result)
}

/// Fetch the metadata for a remote wheel file.
///
/// For a remote wheel, we try the following ways to fetch the metadata:
@@ -217,10 +289,16 @@ impl RegistryClient {
#[instrument(skip(self))]
pub async fn wheel_metadata(&self, built_dist: &BuiltDist) -> Result<Metadata21, Error> {
let metadata = match &built_dist {
BuiltDist::Registry(wheel) => {
self.wheel_metadata_registry(&wheel.index, &wheel.file)
.await?
}
BuiltDist::Registry(wheel) => match &wheel.file.url {
FileLocation::Url(url) => {
self.wheel_metadata_registry(&wheel.index, &wheel.file, url)
.await?
}
FileLocation::Path(path, _url) => {
let reader = fs_err::tokio::File::open(&path).await?;
read_metadata_async(&wheel.filename, built_dist.to_string(), reader).await?
}
},
BuiltDist::DirectUrl(wheel) => {
self.wheel_metadata_no_pep658(
&wheel.filename,
@@ -250,8 +328,9 @@ impl RegistryClient {
&self,
index: &IndexUrl,
file: &File,
url: &Url,
) -> Result<Metadata21, Error> {
if self.index_urls.no_index() {
if self.index_locations.no_index() {
return Err(Error::NoIndex(file.filename.clone()));
}

@@ -262,7 +341,7 @@ impl RegistryClient {
.as_ref()
.is_some_and(pypi_types::DistInfoMetadata::is_available)
{
let url = Url::parse(&format!("{}.metadata", file.url))?;
let url = Url::parse(&format!("{}.metadata", url))?;

let cache_entry = self.cache.entry(
CacheBucket::Wheels,
@@ -275,7 +354,9 @@ impl RegistryClient {

info_span!("parse_metadata21")
.in_scope(|| Metadata21::parse(bytes.as_ref()))
.map_err(|err| Error::MetadataParseError(filename, url.to_string(), err))
.map_err(|err| {
Error::MetadataParseError(filename, url.to_string(), Box::new(err))
})
};
let req = self.client.uncached().get(url.clone()).build()?;
Ok(self
@@ -286,7 +367,7 @@ impl RegistryClient {
// If we lack PEP 658 support, try using HTTP range requests to read only the
// `.dist-info/METADATA` file from the zip, and if that also fails, download the whole wheel
// into the cache and read from there
self.wheel_metadata_no_pep658(&filename, &file.url, WheelCache::Index(index))
self.wheel_metadata_no_pep658(&filename, url, WheelCache::Index(index))
.await
}
}
@@ -298,7 +379,7 @@ impl RegistryClient {
url: &'data Url,
cache_shard: WheelCache<'data>,
) -> Result<Metadata21, Error> {
if self.index_urls.no_index() {
if self.index_locations.no_index() {
return Err(Error::NoIndex(url.to_string()));
}

@@ -317,7 +398,7 @@ impl RegistryClient {
trace!("Getting metadata for {filename} by range request");
let text = wheel_metadata_from_remote_zip(filename, &mut reader).await?;
let metadata = Metadata21::parse(text.as_bytes()).map_err(|err| {
Error::MetadataParseError(filename.clone(), url.to_string(), err)
Error::MetadataParseError(filename.clone(), url.to_string(), Box::new(err))
})?;
Ok(metadata)
}
@@ -364,7 +445,7 @@ impl RegistryClient {
&self,
url: &Url,
) -> Result<Box<dyn futures::AsyncRead + Unpin + Send + Sync>, Error> {
if self.index_urls.no_index() {
if self.index_locations.no_index() {
return Err(Error::NoIndex(url.to_string()));
}

@@ -413,7 +494,7 @@ pub async fn read_metadata_async(
.map_err(|err| Error::Zip(filename.clone(), err))?;

let metadata = Metadata21::parse(&contents)
.map_err(|err| Error::MetadataParseError(filename.clone(), debug_source, err))?;
.map_err(|err| Error::MetadataParseError(filename.clone(), debug_source, Box::new(err)))?;
Ok(metadata)
}
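
The comments above describe a cascade for wheel metadata: use the PEP 658 `.metadata` file when the index advertises it, otherwise read `.dist-info/METADATA` out of the remote zip via an HTTP range request, otherwise download the whole wheel. A schematic version of that fallback chain, with the actual fetching stubbed out and every name hypothetical, might look like:

enum MetadataSource {
    /// PEP 658: the index serves `<wheel>.metadata` next to the wheel.
    Pep658,
    /// Read only `.dist-info/METADATA` out of the remote zip via range requests.
    RangeRequest,
    /// Last resort: download the entire wheel and read the file locally.
    FullDownload,
}

fn fetch_metadata(sources: &[MetadataSource]) -> Result<String, String> {
    let mut last_err = String::from("no metadata source available");
    for source in sources {
        // Stubbed attempts: in a real client each arm performs I/O.
        let attempt: Result<String, String> = match source {
            MetadataSource::Pep658 => Err("no .metadata file advertised".to_string()),
            MetadataSource::RangeRequest => Err("server rejected range request".to_string()),
            MetadataSource::FullDownload => Ok("Metadata-Version: 2.1".to_string()),
        };
        match attempt {
            Ok(metadata) => return Ok(metadata),
            Err(err) => last_err = err,
        }
    }
    Err(last_err)
}
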
@@ -5,7 +5,7 @@ use anyhow::{Context, Result};
use clap::Parser;
use fs_err as fs;

use distribution_types::IndexUrls;
use distribution_types::IndexLocations;
use platform_host::Platform;
use puffin_build::{SourceBuild, SourceBuildContext};
use puffin_cache::{Cache, CacheArgs};
@@ -54,7 +54,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let index_urls = IndexLocations::default();
let setup_py = SetupPyStrategy::default();

let build_dispatch = BuildDispatch::new(
@@ -11,7 +11,7 @@ use rustc_hash::FxHashMap;
use tracing::info;

use distribution_types::{
CachedDist, Dist, DistributionMetadata, IndexUrls, Name, Resolution, VersionOrUrl,
CachedDist, Dist, DistributionMetadata, IndexLocations, Name, Resolution, VersionOrUrl,
};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::Requirement;
@@ -59,7 +59,7 @@ pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let index_locations = IndexLocations::default();
let setup_py = SetupPyStrategy::default();
let tags = venv.interpreter().tags()?;

@@ -67,7 +67,7 @@ pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
&client,
&cache,
venv.interpreter(),
&index_urls,
&index_locations,
venv.python_executable(),
setup_py,
args.no_build,
@@ -81,7 +81,7 @@ pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
tags,
&client,
&venv,
&index_urls,
&index_locations,
)
.await
{
@@ -101,7 +101,7 @@ async fn install_chunk(
tags: &Tags,
client: &RegistryClient,
venv: &Virtualenv,
index_urls: &IndexUrls,
index_locations: &IndexLocations,
) -> Result<()> {
let resolution: Vec<_> = DistFinder::new(tags, client, venv.interpreter())
.resolve_stream(requirements)
@@ -136,7 +136,7 @@ async fn install_chunk(
.into_distributions()
.collect::<Vec<_>>();

let mut registry_index = RegistryWheelIndex::new(build_dispatch.cache(), tags, index_urls);
let mut registry_index = RegistryWheelIndex::new(build_dispatch.cache(), tags, index_locations);
let (cached, uncached): (Vec<_>, Vec<_>) = dists.into_iter().partition_map(|dist| {
// We always want the wheel for the latest version not whatever matching is in cache
let VersionOrUrl::Version(version) = dist.version_or_url() else {
@@ -9,7 +9,7 @@ use fs_err::File;
use itertools::Itertools;
use petgraph::dot::{Config as DotConfig, Dot};

use distribution_types::{IndexUrls, Resolution};
use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl, Resolution};
use pep508_rs::Requirement;
use platform_host::Platform;
use puffin_cache::{Cache, CacheArgs};
@@ -42,6 +42,12 @@ pub(crate) struct ResolveCliArgs {
cache_args: CacheArgs,
#[arg(long)]
exclude_newer: Option<DateTime<Utc>>,
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
#[clap(long)]
find_links: Vec<FlatIndexLocation>,
}

pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
@@ -49,17 +55,19 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {

let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let setup_py = SetupPyStrategy::default();
let index_locations =
IndexLocations::from_args(args.index_url, args.extra_index_url, args.find_links, false);
let client = RegistryClientBuilder::new(cache.clone())
.index_locations(index_locations.clone())
.build();

let build_dispatch = BuildDispatch::new(
&client,
&cache,
venv.interpreter(),
&index_urls,
&index_locations,
venv.python_executable(),
setup_py,
SetupPyStrategy::default(),
args.no_build,
);

@@ -73,7 +81,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
tags,
&client,
&build_dispatch,
);
)?;
let resolution_graph = resolver.resolve().await.with_context(|| {
format!(
"No solution found when resolving: {}",
@@ -11,7 +11,7 @@ use tokio::time::Instant;
use tracing::{info, info_span, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt;

use distribution_types::IndexUrls;
use distribution_types::IndexLocations;
use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers};
use pep508_rs::{Requirement, VersionOrUrl};
use platform_host::Platform;
@@ -73,14 +73,14 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let index_locations = IndexLocations::default();
let setup_py = SetupPyStrategy::default();

let build_dispatch = BuildDispatch::new(
&client,
&cache,
venv.interpreter(),
&index_urls,
&index_locations,
venv.python_executable(),
setup_py,
args.no_build,
@@ -9,7 +9,7 @@ use anyhow::{bail, Context, Result};
use itertools::Itertools;
use tracing::{debug, instrument};

use distribution_types::{CachedDist, DistributionId, IndexUrls, Name, Resolution};
use distribution_types::{CachedDist, DistributionId, IndexLocations, Name, Resolution};
use pep508_rs::Requirement;
use puffin_build::{SourceBuild, SourceBuildContext};
use puffin_cache::Cache;
@@ -25,7 +25,7 @@ pub struct BuildDispatch<'a> {
client: &'a RegistryClient,
cache: &'a Cache,
interpreter: &'a Interpreter,
index_urls: &'a IndexUrls,
index_locations: &'a IndexLocations,
base_python: PathBuf,
setup_py: SetupPyStrategy,
no_build: bool,
@@ -39,7 +39,7 @@ impl<'a> BuildDispatch<'a> {
client: &'a RegistryClient,
cache: &'a Cache,
interpreter: &'a Interpreter,
index_urls: &'a IndexUrls,
index_locations: &'a IndexLocations,
base_python: PathBuf,
setup_py: SetupPyStrategy,
no_build: bool,
@@ -48,7 +48,7 @@ impl<'a> BuildDispatch<'a> {
client,
cache,
interpreter,
index_urls,
index_locations,
base_python,
setup_py,
no_build,
@@ -99,7 +99,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
tags,
self.client,
self,
);
)?;
let graph = resolver.resolve().await.with_context(|| {
format!(
"No solution found when resolving: {}",
@@ -149,7 +149,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
Vec::new(),
site_packages,
&Reinstall::None,
self.index_urls,
self.index_locations,
self.cache(),
venv,
tags,
@@ -12,7 +12,9 @@ use tracing::instrument;
use url::Url;

use distribution_filename::{WheelFilename, WheelFilenameError};
use distribution_types::{BuiltDist, DirectGitUrl, Dist, LocalEditable, Name, SourceDist};
use distribution_types::{
BuiltDist, DirectGitUrl, Dist, FileLocation, LocalEditable, Name, SourceDist,
};
use platform_tags::Tags;
use puffin_cache::{Cache, CacheBucket, WheelCache};
use puffin_client::RegistryClient;
@@ -108,6 +110,24 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
) -> Result<LocalWheel, DistributionDatabaseError> {
match &dist {
Dist::Built(BuiltDist::Registry(wheel)) => {
let url = match &wheel.file.url {
FileLocation::Url(url) => url,
FileLocation::Path(path, url) => {
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Url(url).remote_wheel_dir(wheel.name().as_ref()),
wheel.filename.stem(),
);

return Ok(LocalWheel::Disk(DiskWheel {
dist: dist.clone(),
path: path.clone(),
target: cache_entry.into_path_buf(),
filename: wheel.filename.clone(),
}));
}
};

// Download and unzip on the same tokio task.
//
// In all wheels we've seen so far, unzipping while downloading is
@@ -123,7 +143,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
// for downloading and unzipping (with a buffer in between) and switch
// to rayon if this buffer grows large by the time the file is fully
// downloaded.
let reader = self.client.stream_external(&wheel.file.url).await?;
let reader = self.client.stream_external(url).await?;

// Download and unzip the wheel to a temporary directory.
let temp_dir = tempfile::tempdir_in(self.cache.root())?;
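
The `FileLocation` match above is the mechanism that lets a registry `File` refer either to a remote URL or to a local `--find-links` path, so local wheels are used straight from disk instead of being downloaded. A simplified stand-in for that shape (not the real type definition):

use std::path::PathBuf;

/// Where the bytes of a distribution actually live.
enum FileLocation {
    /// A remote file, identified by its URL.
    Url(String),
    /// A local file from a `--find-links` directory, plus a `file://` URL for display.
    Path(PathBuf, String),
}

fn describe(location: &FileLocation) -> String {
    match location {
        FileLocation::Url(url) => format!("download from {url}"),
        FileLocation::Path(path, _url) => format!("read directly from {}", path.display()),
    }
}
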
@@ -4,7 +4,7 @@ use std::path::Path;

use rustc_hash::FxHashMap;

use distribution_types::{CachedRegistryDist, CachedWheel, IndexUrls};
use distribution_types::{CachedRegistryDist, CachedWheel, IndexLocations};
use pep440_rs::Version;
use platform_tags::Tags;
use puffin_cache::{Cache, CacheBucket, WheelCache};
@@ -16,17 +16,17 @@ use puffin_normalize::PackageName;
pub struct RegistryWheelIndex<'a> {
cache: &'a Cache,
tags: &'a Tags,
index_urls: &'a IndexUrls,
index_locations: &'a IndexLocations,
index: FxHashMap<PackageName, BTreeMap<Version, CachedRegistryDist>>,
}

impl<'a> RegistryWheelIndex<'a> {
/// Initialize an index of cached distributions from a directory.
pub fn new(cache: &'a Cache, tags: &'a Tags, index_urls: &'a IndexUrls) -> Self {
pub fn new(cache: &'a Cache, tags: &'a Tags, index_locations: &'a IndexLocations) -> Self {
Self {
cache,
tags,
index_urls,
index_locations,
index: FxHashMap::default(),
}
}
@@ -56,9 +56,12 @@ impl<'a> RegistryWheelIndex<'a> {
fn get_impl(&mut self, name: &PackageName) -> &BTreeMap<Version, CachedRegistryDist> {
let versions = match self.index.entry(name.clone()) {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => {
entry.insert(Self::index(name, self.cache, self.tags, self.index_urls))
}
Entry::Vacant(entry) => entry.insert(Self::index(
name,
self.cache,
self.tags,
self.index_locations,
)),
};
versions
}
@@ -68,11 +71,11 @@ impl<'a> RegistryWheelIndex<'a> {
package: &PackageName,
cache: &Cache,
tags: &Tags,
index_urls: &IndexUrls,
index_locations: &IndexLocations,
) -> BTreeMap<Version, CachedRegistryDist> {
let mut versions = BTreeMap::new();

for index_url in index_urls {
for index_url in index_locations.indexes() {
// Index all the wheels that were downloaded directly from the registry.
let wheel_dir = cache.shard(
CacheBucket::Wheels,
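
`RegistryWheelIndex::get_impl` builds the per-package version map lazily and memoizes it via the hash-map entry API. The caching pattern in isolation, with placeholder types and a stubbed build step, is simply:

use std::collections::{BTreeMap, HashMap};

/// Cache an expensive per-key computation, building it at most once per key.
struct LazyIndex {
    cache: HashMap<String, BTreeMap<String, String>>,
}

impl LazyIndex {
    fn new() -> Self {
        Self { cache: HashMap::new() }
    }

    fn get(&mut self, package: &str) -> &BTreeMap<String, String> {
        self.cache
            .entry(package.to_string())
            .or_insert_with(|| Self::build(package))
    }

    /// Stand-in for scanning the wheel cache for one package.
    fn build(_package: &str) -> BTreeMap<String, String> {
        BTreeMap::new()
    }
}
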
@@ -16,8 +16,8 @@ use zip::ZipArchive;

use distribution_filename::WheelFilename;
use distribution_types::{
DirectArchiveUrl, DirectGitUrl, Dist, GitSourceDist, Identifier, LocalEditable, Name,
PathSourceDist, RemoteSource, SourceDist,
DirectArchiveUrl, DirectGitUrl, Dist, FileLocation, GitSourceDist, Identifier, LocalEditable,
Name, PathSourceDist, RemoteSource, SourceDist,
};
use install_wheel_rs::read_dist_info;
use platform_tags::Tags;
@@ -98,19 +98,32 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
.await?
}
SourceDist::Registry(registry_source_dist) => {
let url = match &registry_source_dist.file.url {
FileLocation::Url(url) => url,
FileLocation::Path(path, url) => {
let path_source_dist = PathSourceDist {
name: registry_source_dist.filename.name.clone(),
url: url.clone(),
path: path.clone(),
editable: false,
};
return self.path(source_dist, &path_source_dist).await;
}
};

// For registry source distributions, shard by package, then by SHA.
// Ex) `pypi/requests/a673187abc19fe6c`
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Index(&registry_source_dist.index)
.remote_wheel_dir(registry_source_dist.name.as_ref())
.remote_wheel_dir(registry_source_dist.filename.name.as_ref())
.join(&registry_source_dist.file.distribution_id().as_str()[..16]),
);

self.url(
source_dist,
&registry_source_dist.file.filename,
&registry_source_dist.file.url,
url,
&cache_shard,
None,
)
@@ -154,19 +167,32 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
.await?
}
SourceDist::Registry(registry_source_dist) => {
let url = match &registry_source_dist.file.url {
FileLocation::Url(url) => url,
FileLocation::Path(path, url) => {
let path_source_dist = PathSourceDist {
name: registry_source_dist.filename.name.clone(),
url: url.clone(),
path: path.clone(),
editable: false,
};
return self.path_metadata(source_dist, &path_source_dist).await;
}
};

// For registry source distributions, shard by package, then by SHA.
// Ex) `pypi/requests/a673187abc19fe6c`
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Index(&registry_source_dist.index)
.remote_wheel_dir(registry_source_dist.name.as_ref())
.remote_wheel_dir(registry_source_dist.filename.name.as_ref())
.join(&registry_source_dist.file.distribution_id().as_str()[..16]),
);

self.url_metadata(
source_dist,
&registry_source_dist.file.filename,
&registry_source_dist.file.url,
url,
&cache_shard,
None,
)
@@ -153,7 +153,7 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
}

/// Download, build, and unzip a single wheel.
#[instrument(skip_all, fields(name = % dist, size = ? dist.size(), url = dist.file().map(|file| file.url.as_str()).unwrap_or_default()))]
#[instrument(skip_all, fields(name = % dist, size = ? dist.size(), url = dist.file().map(|file| file.url.to_string()).unwrap_or_default()))]
pub async fn get_wheel(
&self,
dist: Dist,
@@ -5,7 +5,7 @@ use rustc_hash::FxHashSet;
use tracing::{debug, warn};

use distribution_types::{
git_reference, BuiltDist, CachedDirectUrlDist, CachedDist, Dist, IndexUrls, InstalledDist,
git_reference, BuiltDist, CachedDirectUrlDist, CachedDist, Dist, IndexLocations, InstalledDist,
Name, SourceDist,
};
use pep508_rs::{Requirement, VersionOrUrl};
@@ -45,13 +45,13 @@ impl InstallPlan {
editable_requirements: Vec<ResolvedEditable>,
mut site_packages: SitePackages,
reinstall: &Reinstall,
index_urls: &IndexUrls,
index_locations: &IndexLocations,
cache: &Cache,
venv: &Virtualenv,
tags: &Tags,
) -> Result<Self> {
// Index all the already-downloaded wheels in the cache.
let mut registry_index = RegistryWheelIndex::new(cache, tags, index_urls);
let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations);

let mut local = vec![];
let mut remote = vec![];
@@ -2,6 +2,7 @@ use pubgrub::range::Range;
use rustc_hash::FxHashMap;

use distribution_types::{Dist, DistributionMetadata, Name};
use distribution_types::{DistRequiresPython, ResolvableDist};
use pep440_rs::VersionSpecifiers;
use pep508_rs::{Requirement, VersionOrUrl};
use puffin_normalize::PackageName;
@@ -10,7 +11,7 @@ use crate::prerelease_mode::PreReleaseStrategy;
use crate::pubgrub::PubGrubVersion;
use crate::python_requirement::PythonRequirement;
use crate::resolution_mode::ResolutionStrategy;
use crate::version_map::{DistRequiresPython, ResolvableFile, VersionMap};
use crate::version_map::VersionMap;
use crate::{Manifest, ResolutionOptions};

#[derive(Debug, Clone)]
@@ -160,7 +161,7 @@ impl CandidateSelector {
/// Select the first-matching [`Candidate`] from a set of candidate versions and files,
/// preferring wheels over source distributions.
fn select_candidate<'a>(
versions: impl Iterator<Item = (&'a PubGrubVersion, ResolvableFile<'a>)>,
versions: impl Iterator<Item = (&'a PubGrubVersion, ResolvableDist<'a>)>,
package_name: &'a PackageName,
range: &Range<PubGrubVersion>,
allow_prerelease: AllowPreRelease,
@@ -168,7 +169,7 @@ impl CandidateSelector {
#[derive(Debug)]
enum PreReleaseCandidate<'a> {
NotNecessary,
IfNecessary(&'a PubGrubVersion, ResolvableFile<'a>),
IfNecessary(&'a PubGrubVersion, ResolvableDist<'a>),
}

let mut prerelease = None;
@@ -222,15 +223,15 @@ pub(crate) struct Candidate<'a> {
/// The version of the package.
version: &'a PubGrubVersion,
/// The file to use for resolving and installing the package.
file: ResolvableFile<'a>,
dist: ResolvableDist<'a>,
}

impl<'a> Candidate<'a> {
fn new(name: &'a PackageName, version: &'a PubGrubVersion, file: ResolvableFile<'a>) -> Self {
fn new(name: &'a PackageName, version: &'a PubGrubVersion, dist: ResolvableDist<'a>) -> Self {
Self {
name,
version,
file,
dist,
}
}

@@ -246,12 +247,12 @@ impl<'a> Candidate<'a> {

/// Return the [`DistFile`] to use when resolving the package.
pub(crate) fn resolve(&self) -> &DistRequiresPython {
self.file.resolve()
self.dist.resolve()
}

/// Return the [`DistFile`] to use when installing the package.
pub(crate) fn install(&self) -> &DistRequiresPython {
self.file.install()
self.dist.install()
}

/// If the candidate doesn't match the given requirement, return the version specifiers.
@@ -3,6 +3,7 @@
//! This is similar to running `pip install` with the `--no-deps` flag.

use anyhow::Result;
use distribution_filename::DistFilename;
use futures::{stream, Stream, StreamExt, TryStreamExt};
use rustc_hash::FxHashMap;

@@ -56,15 +57,10 @@ impl<'a> DistFinder<'a> {
let (index, metadata) = self.client.simple(&requirement.name).await?;

// Pick a version that satisfies the requirement.
let Some(ParsedFile {
name,
version,
file,
}) = self.select(requirement, metadata)
else {
let Some(ParsedFile { filename, file }) = self.select(requirement, metadata) else {
return Err(ResolveError::NotFound(requirement.clone()));
};
let distribution = Dist::from_registry(name, version, file, index);
let distribution = Dist::from_registry(filename, file, index);

if let Some(reporter) = self.reporter.as_ref() {
reporter.on_progress(&distribution);
@@ -152,8 +148,7 @@ impl<'a> DistFinder<'a> {
{
best_wheel = Some((
ParsedFile {
name: wheel.name,
version: wheel.version,
filename: DistFilename::WheelFilename(wheel),
file,
},
priority,
@@ -181,8 +176,7 @@ impl<'a> DistFinder<'a> {

best_version = Some(sdist.version.clone());
best_sdist = Some(ParsedFile {
name: sdist.name,
version: sdist.version,
filename: DistFilename::SourceDistFilename(sdist),
file,
});
}
@@ -195,10 +189,8 @@ impl<'a> DistFinder<'a> {

#[derive(Debug)]
struct ParsedFile {
/// The [`PackageName`] extracted from the [`File`].
name: PackageName,
/// The version extracted from the [`File`].
version: Version,
/// The wheel or source dist filename extracted from the [`File`].
filename: DistFilename,
/// The underlying [`File`].
file: File,
}
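
In `DistFinder`, the "best" wheel is the one with the highest platform-tag priority; wheels with no priority are incompatible with the current platform and skipped. Reduced to a stand-alone sketch with simplified types, the comparison is:

/// Pick the wheel with the highest platform-tag priority; `None` priority means incompatible.
fn best_wheel<'a>(wheels: &[(&'a str, Option<u32>)]) -> Option<&'a str> {
    let mut best: Option<(&'a str, u32)> = None;
    for (filename, priority) in wheels {
        let Some(priority) = priority else { continue };
        match best {
            // Keep the current best if it is at least as good.
            Some((_, current)) if current >= *priority => {}
            _ => best = Some((*filename, *priority)),
        }
    }
    best.map(|(filename, _)| filename)
}
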
@@ -253,12 +253,12 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
nodes.sort_unstable_by_key(|(_, package)| package.name());

// Print out the dependency graph.
for (index, package) in nodes {
for (index, dist) in nodes {
// Display the node itself.
if let Some((editable, _)) = self.resolution.editables.get(package.name()) {
if let Some((editable, _)) = self.resolution.editables.get(dist.name()) {
write!(f, "-e {}", editable.verbatim())?;
} else {
write!(f, "{}", package.verbatim())?;
write!(f, "{}", dist.verbatim())?;
}

// Display the distribution hashes, if any.
@@ -266,7 +266,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
if let Some(hashes) = self
.resolution
.hashes
.get(package.name())
.get(dist.name())
.filter(|hashes| !hashes.is_empty())
{
for hash in hashes {
@@ -75,6 +75,8 @@ pub struct Resolver<'a, Provider: ResolverProvider> {

impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvider<'a, Context>> {
/// Initialize a new resolver using the default backend doing real requests.
///
/// Reads the flat index entries.
pub fn new(
manifest: Manifest,
options: ResolutionOptions,
@@ -83,7 +85,7 @@ impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvid
tags: &'a Tags,
client: &'a RegistryClient,
build_context: &'a Context,
) -> Self {
) -> Result<Self, puffin_client::Error> {
let provider = DefaultResolverProvider::new(
client,
DistributionDatabase::new(build_context.cache(), tags, client, build_context),
@@ -95,14 +97,14 @@ impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvid
.iter()
.chain(manifest.constraints.iter())
.collect(),
);
Self::new_custom_io(
)?;
Ok(Self::new_custom_io(
manifest,
options,
markers,
PythonRequirement::new(interpreter, markers),
provider,
)
))
}
}

@@ -377,14 +379,10 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
}
PubGrubPackage::Package(package_name, _extra, Some(url)) => {
// Emit a request to fetch the metadata for this distribution.
let distribution = Dist::from_url(package_name.clone(), url.clone())?;
if self
.index
.distributions
.register_owned(distribution.package_id())
{
priorities.add(distribution.name().clone());
request_sink.unbounded_send(Request::Dist(distribution))?;
let dist = Dist::from_url(package_name.clone(), url.clone())?;
if self.index.distributions.register_owned(dist.package_id()) {
priorities.add(dist.name().clone());
request_sink.unbounded_send(Request::Dist(dist))?;
}
}
}
@@ -542,8 +540,8 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
.distributions
.register_owned(candidate.package_id())
{
let distribution = candidate.resolve().dist.clone();
request_sink.unbounded_send(Request::Dist(distribution))?;
let dist = candidate.resolve().dist.clone();
request_sink.unbounded_send(Request::Dist(dist))?;
}

Ok(Some(version))
@@ -690,13 +688,19 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
trace!("Received package metadata for: {package_name}");
self.index.packages.done(package_name, version_map);
}
Some(Response::Dist(Dist::Built(distribution), metadata, ..)) => {
trace!("Received built distribution metadata for: {distribution}");
self.index
.distributions
.done(distribution.package_id(), metadata);
Some(Response::Dist {
dist: Dist::Built(dist),
metadata,
precise: _,
}) => {
trace!("Received built distribution metadata for: {dist}");
self.index.distributions.done(dist.package_id(), metadata);
}
Some(Response::Dist(Dist::Source(distribution), metadata, precise)) => {
Some(Response::Dist {
dist: Dist::Source(distribution),
metadata,
precise,
}) => {
trace!("Received source distribution metadata for: {distribution}");
self.index
.distributions
@@ -753,7 +757,11 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
ResolveError::FetchAndBuild(Box::new(source_dist), err)
}
})?;
Ok(Some(Response::Dist(dist, metadata, precise)))
Ok(Some(Response::Dist {
dist,
metadata,
precise,
}))
}

// Pre-fetch the package and distribution metadata.
@@ -804,7 +812,11 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
}
})?;

Ok(Some(Response::Dist(dist, metadata, precise)))
Ok(Some(Response::Dist {
dist,
metadata,
precise,
}))
} else {
Ok(None)
}
@@ -852,7 +864,11 @@ enum Response {
/// The returned metadata for a package hosted on a registry.
Package(PackageName, VersionMap),
/// The returned metadata for a distribution.
Dist(Dist, Metadata21, Option<Url>),
Dist {
dist: Dist,
metadata: Metadata21,
precise: Option<Url>,
},
}

/// An enum used by [`DependencyProvider`] that holds information about package dependencies.
@@ -3,11 +3,13 @@ use std::future::Future;
use anyhow::Result;
use chrono::{DateTime, Utc};
use futures::TryFutureExt;
use rustc_hash::FxHashMap;
use url::Url;

use crate::pubgrub::PubGrubVersion;
use distribution_types::Dist;
use platform_tags::Tags;
use puffin_client::RegistryClient;
use puffin_client::{FlatIndex, RegistryClient};
use puffin_distribution::{DistributionDatabase, DistributionDatabaseError};
use puffin_normalize::PackageName;
use puffin_traits::BuildContext;
@@ -46,6 +48,8 @@ pub trait ResolverProvider: Send + Sync {
/// [`RegistryClient`] and [`DistributionDatabase`].
pub struct DefaultResolverProvider<'a, Context: BuildContext + Send + Sync> {
client: &'a RegistryClient,
/// These are the entries from `--find-links` that act as overrides for index responses.
flat_index: FxHashMap<PackageName, FlatIndex<PubGrubVersion>>,
fetcher: DistributionDatabase<'a, Context>,
tags: &'a Tags,
python_requirement: PythonRequirement<'a>,
@@ -54,6 +58,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext + Send + Sync> {
}

impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Context> {
/// Reads the flat index entries and builds the provider.
pub fn new(
client: &'a RegistryClient,
fetcher: DistributionDatabase<'a, Context>,
@@ -61,15 +66,19 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
python_requirement: PythonRequirement<'a>,
exclude_newer: Option<DateTime<Utc>>,
allowed_yanks: AllowedYanks,
) -> Self {
Self {
) -> Result<Self, puffin_client::Error> {
let flat_index_dists = client.flat_index()?;
let flat_index = FlatIndex::from_dists(flat_index_dists, tags);

Ok(Self {
client,
flat_index,
fetcher,
tags,
python_requirement,
exclude_newer,
allowed_yanks,
}
})
}
}

@@ -80,6 +89,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
&'io self,
package_name: &'io PackageName,
) -> impl Future<Output = VersionMapResponse> + Send + 'io {
let flat_index_override = self.flat_index.get(package_name).cloned();
self.client
.simple(package_name)
.map_ok(move |(index, metadata)| {
@@ -91,6 +101,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
&self.python_requirement,
&self.allowed_yanks,
self.exclude_newer.as_ref(),
flat_index_override,
)
})
}
@@ -5,10 +5,9 @@ use chrono::{DateTime, Utc};
use tracing::{instrument, warn};

use distribution_filename::DistFilename;
use distribution_types::{Dist, IndexUrl};
use pep440_rs::VersionSpecifiers;
use platform_tags::{TagPriority, Tags};
use puffin_client::SimpleMetadata;
use distribution_types::{Dist, IndexUrl, PrioritizedDistribution, ResolvableDist};
use platform_tags::Tags;
use puffin_client::{FlatIndex, SimpleMetadata};
use puffin_normalize::PackageName;
use puffin_warnings::warn_user_once;
use pypi_types::{Hashes, Yanked};
@@ -18,12 +17,12 @@ use crate::python_requirement::PythonRequirement;
use crate::yanks::AllowedYanks;

/// A map from versions to distributions.
#[derive(Debug, Default)]
#[derive(Debug, Default, Clone)]
pub struct VersionMap(BTreeMap<PubGrubVersion, PrioritizedDistribution>);

impl VersionMap {
/// Initialize a [`VersionMap`] from the given metadata.
#[instrument(skip_all, fields(package_name = % package_name))]
#[instrument(skip_all, fields(package_name))]
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_metadata(
metadata: SimpleMetadata,
@@ -33,9 +32,11 @@ impl VersionMap {
python_requirement: &PythonRequirement,
allowed_yanks: &AllowedYanks,
exclude_newer: Option<&DateTime<Utc>>,
flat_index: Option<FlatIndex<PubGrubVersion>>,
) -> Self {
// If we have packages of the same name from find links, give them priority; otherwise, start empty.
let mut version_map: BTreeMap<PubGrubVersion, PrioritizedDistribution> =
BTreeMap::default();
flat_index.map(|overrides| overrides.0).unwrap_or_default();

// Collect compatible distributions.
for (version, files) in metadata {
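
This is where the precedence rule from the commit message is implemented: the version map is seeded from the `--find-links` entries when they exist, and the registry files are merged in afterwards (in the real code that merge is tag- and priority-aware). A toy model of the seeding step, using plain strings in place of the real types:

use std::collections::BTreeMap;

/// Start from the flat-index (find-links) entries, then merge registry files
/// into the per-version buckets.
fn seed_version_map(
    flat: Option<BTreeMap<String, Vec<String>>>,
    registry: Vec<(String, String)>,
) -> BTreeMap<String, Vec<String>> {
    // Seed with the local find-links entries, if any.
    let mut map = flat.unwrap_or_default();
    // Registry files are added into the existing per-version entries;
    // the real implementation decides per file whether it beats the seeded one.
    for (version, file) in registry {
        map.entry(version).or_default().push(file);
    }
    map
}

Because the seeded entries are already present, a version that exists both locally and on an index ends up resolving to the local file, which is the behaviour described in the commit message.
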
@@ -82,22 +83,24 @@ impl VersionMap {
})
});
let dist = Dist::from_registry(
filename.name.clone(),
filename.version.clone(),
DistFilename::WheelFilename(filename),
file,
index.clone(),
);
match version_map.entry(version.clone().into()) {
Entry::Occupied(mut entry) => {
entry
.get_mut()
.insert_built(dist, requires_python, hash, priority);
entry.get_mut().insert_built(
dist,
requires_python,
Some(hash),
priority,
);
}
Entry::Vacant(entry) => {
entry.insert(PrioritizedDistribution::from_built(
dist,
requires_python,
hash,
Some(hash),
priority,
));
}
@@ -105,20 +108,21 @@ impl VersionMap {
}
DistFilename::SourceDistFilename(filename) => {
let dist = Dist::from_registry(
filename.name.clone(),
filename.version.clone(),
DistFilename::SourceDistFilename(filename),
file,
index.clone(),
);
match version_map.entry(version.clone().into()) {
Entry::Occupied(mut entry) => {
entry.get_mut().insert_source(dist, requires_python, hash);
entry
.get_mut()
.insert_source(dist, requires_python, Some(hash));
}
Entry::Vacant(entry) => {
entry.insert(PrioritizedDistribution::from_source(
dist,
requires_python,
hash,
Some(hash),
));
}
}
@@ -131,200 +135,24 @@ impl VersionMap {
}

/// Return the [`DistFile`] for the given version, if any.
pub(crate) fn get(&self, version: &PubGrubVersion) -> Option<ResolvableFile> {
pub(crate) fn get(&self, version: &PubGrubVersion) -> Option<ResolvableDist> {
self.0.get(version).and_then(PrioritizedDistribution::get)
}

/// Return an iterator over the versions and distributions.
pub(crate) fn iter(
&self,
) -> impl DoubleEndedIterator<Item = (&PubGrubVersion, ResolvableFile)> {
) -> impl DoubleEndedIterator<Item = (&PubGrubVersion, ResolvableDist)> {
self.0
.iter()
.filter_map(|(version, file)| Some((version, file.get()?)))
.filter_map(|(version, dist)| Some((version, dist.get()?)))
}

/// Return the [`Hashes`] for the given version, if any.
pub(crate) fn hashes(&self, version: &PubGrubVersion) -> Vec<Hashes> {
self.0
.get(version)
.map(|file| file.hashes.clone())
.map(|file| file.hashes().to_vec())
.unwrap_or_default()
}
}

/// Attach its requires-python to a [`Dist`], since downstream needs this information to filter
/// [`PrioritizedDistribution`].
#[derive(Debug)]
pub(crate) struct DistRequiresPython {
pub(crate) dist: Dist,
pub(crate) requires_python: Option<VersionSpecifiers>,
}

#[derive(Debug)]
struct PrioritizedDistribution {
/// An arbitrary source distribution for the package version.
source: Option<DistRequiresPython>,
/// The highest-priority, platform-compatible wheel for the package version.
compatible_wheel: Option<(DistRequiresPython, TagPriority)>,
/// An arbitrary, platform-incompatible wheel for the package version.
incompatible_wheel: Option<DistRequiresPython>,
/// The hashes for each distribution.
hashes: Vec<Hashes>,
}

impl PrioritizedDistribution {
/// Create a new [`PrioritizedDistribution`] from the given wheel distribution.
fn from_built(
dist: Dist,
requires_python: Option<VersionSpecifiers>,
hash: Hashes,
priority: Option<TagPriority>,
) -> Self {
if let Some(priority) = priority {
Self {
source: None,
compatible_wheel: Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
)),
incompatible_wheel: None,
hashes: vec![hash],
}
} else {
Self {
source: None,
compatible_wheel: None,
incompatible_wheel: Some(DistRequiresPython {
dist,
requires_python,
}),
hashes: vec![hash],
}
}
}

/// Create a new [`PrioritizedDistribution`] from the given source distribution.
fn from_source(dist: Dist, requires_python: Option<VersionSpecifiers>, hash: Hashes) -> Self {
Self {
source: Some(DistRequiresPython {
dist,
requires_python,
}),
compatible_wheel: None,
incompatible_wheel: None,
hashes: vec![hash],
}
}

/// Insert the given built distribution into the [`PrioritizedDistribution`].
fn insert_built(
&mut self,
dist: Dist,
requires_python: Option<VersionSpecifiers>,
hash: Hashes,
priority: Option<TagPriority>,
) {
// Prefer the highest-priority, platform-compatible wheel.
if let Some(priority) = priority {
if let Some((.., existing_priority)) = &self.compatible_wheel {
if priority > *existing_priority {
self.compatible_wheel = Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
));
}
} else {
self.compatible_wheel = Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
));
}
} else if self.incompatible_wheel.is_none() {
self.incompatible_wheel = Some(DistRequiresPython {
dist,
requires_python,
});
}
self.hashes.push(hash);
}

/// Insert the given source distribution into the [`PrioritizedDistribution`].
fn insert_source(
&mut self,
dist: Dist,
requires_python: Option<VersionSpecifiers>,
hash: Hashes,
) {
if self.source.is_none() {
self.source = Some(DistRequiresPython {
dist,
requires_python,
});
}
self.hashes.push(hash);
}

/// Return the highest-priority distribution for the package version, if any.
fn get(&self) -> Option<ResolvableFile> {
match (
&self.compatible_wheel,
&self.source,
&self.incompatible_wheel,
) {
// Prefer the highest-priority, platform-compatible wheel.
(Some((wheel, _)), _, _) => Some(ResolvableFile::CompatibleWheel(wheel)),
// If we have a compatible source distribution and an incompatible wheel, return the
// wheel. We assume that all distributions have the same metadata for a given package
// version. If a compatible source distribution exists, we assume we can build it, but
// using the wheel is faster.
(_, Some(source_dist), Some(wheel)) => {
Some(ResolvableFile::IncompatibleWheel(source_dist, wheel))
}
// Otherwise, if we have a source distribution, return it.
(_, Some(source_dist), _) => Some(ResolvableFile::SourceDist(source_dist)),
_ => None,
}
}
}

#[derive(Debug, Clone)]
pub(crate) enum ResolvableFile<'a> {
/// The distribution should be resolved and installed using a source distribution.
SourceDist(&'a DistRequiresPython),
/// The distribution should be resolved and installed using a wheel distribution.
CompatibleWheel(&'a DistRequiresPython),
/// The distribution should be resolved using an incompatible wheel distribution, but
/// installed using a source distribution.
IncompatibleWheel(&'a DistRequiresPython, &'a DistRequiresPython),
}

impl<'a> ResolvableFile<'a> {
/// Return the [`DistFile`] to use during resolution.
pub(crate) fn resolve(&self) -> &DistRequiresPython {
match *self {
ResolvableFile::SourceDist(sdist) => sdist,
ResolvableFile::CompatibleWheel(wheel) => wheel,
ResolvableFile::IncompatibleWheel(_, wheel) => wheel,
}
}

/// Return the [`DistFile`] to use during installation.
pub(crate) fn install(&self) -> &DistRequiresPython {
match *self {
ResolvableFile::SourceDist(sdist) => sdist,
ResolvableFile::CompatibleWheel(wheel) => wheel,
ResolvableFile::IncompatibleWheel(sdist, _) => sdist,
}
}
}
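
The selection logic moved out of this file encodes "prefer wheels, but stay installable": a compatible wheel is used for both resolution and installation; an incompatible wheel paired with a source distribution resolves from the wheel's metadata but installs by building the sdist; otherwise the sdist alone is used. A compact restatement of that three-way choice with simplified types:

enum Choice<'a> {
    /// Resolve and install from this wheel.
    CompatibleWheel(&'a str),
    /// Resolve from the wheel's metadata, but build and install the sdist.
    IncompatibleWheel { sdist: &'a str, wheel: &'a str },
    /// Only a source distribution is available.
    SourceDist(&'a str),
}

fn choose<'a>(
    compatible_wheel: Option<&'a str>,
    incompatible_wheel: Option<&'a str>,
    sdist: Option<&'a str>,
) -> Option<Choice<'a>> {
    match (compatible_wheel, sdist, incompatible_wheel) {
        (Some(wheel), _, _) => Some(Choice::CompatibleWheel(wheel)),
        (None, Some(sdist), Some(wheel)) => Some(Choice::IncompatibleWheel { sdist, wheel }),
        (None, Some(sdist), None) => Some(Choice::SourceDist(sdist)),
        _ => None,
    }
}
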
@@ -119,7 +119,7 @@ async fn resolve(
tags,
&client,
&build_context,
);
)?;
Ok(resolver.resolve().await?)
}