Use `VerbatimParsedUrl` in `pep508_rs` (#3758)

When parsing requirements from any source, parse the URL parts directly
(and reject unsupported URLs) instead of deferring URL parsing to a later
stage. This removes a number of error branches and completes the work of
parsing URL parts once and passing them around everywhere.

Many usages of the assembled `VerbatimUrl` remain, but these can be
removed incrementally.

Please review commit-by-commit.
This commit is contained in:
konsti 2024-05-23 21:52:47 +02:00 committed by GitHub
parent 0d2f3fc4e4
commit 4db468e27f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
56 changed files with 877 additions and 656 deletions

9
Cargo.lock generated
View File

@ -1104,7 +1104,6 @@ dependencies = [
"cache-key", "cache-key",
"distribution-filename", "distribution-filename",
"fs-err", "fs-err",
"git2",
"indexmap", "indexmap",
"itertools 0.13.0", "itertools 0.13.0",
"once_cell", "once_cell",
@ -2860,7 +2859,9 @@ dependencies = [
name = "pypi-types" name = "pypi-types"
version = "0.0.1" version = "0.0.1"
dependencies = [ dependencies = [
"anyhow",
"chrono", "chrono",
"git2",
"indexmap", "indexmap",
"mailparse", "mailparse",
"once_cell", "once_cell",
@ -2873,6 +2874,7 @@ dependencies = [
"toml", "toml",
"tracing", "tracing",
"url", "url",
"uv-git",
"uv-normalize", "uv-normalize",
] ]
@ -3076,12 +3078,12 @@ dependencies = [
"insta", "insta",
"itertools 0.13.0", "itertools 0.13.0",
"pep508_rs", "pep508_rs",
"pypi-types",
"regex", "regex",
"reqwest", "reqwest",
"reqwest-middleware", "reqwest-middleware",
"tempfile", "tempfile",
"test-case", "test-case",
"thiserror",
"tokio", "tokio",
"tracing", "tracing",
"unscanny", "unscanny",
@ -4505,6 +4507,7 @@ dependencies = [
"pep508_rs", "pep508_rs",
"platform-tags", "platform-tags",
"predicates", "predicates",
"pypi-types",
"rayon", "rayon",
"regex", "regex",
"requirements-txt", "requirements-txt",
@ -4579,6 +4582,7 @@ dependencies = [
"once_cell", "once_cell",
"pep440_rs", "pep440_rs",
"pep508_rs", "pep508_rs",
"pypi-types",
"regex", "regex",
"rustc-hash", "rustc-hash",
"serde", "serde",
@ -4704,6 +4708,7 @@ dependencies = [
"pep508_rs", "pep508_rs",
"poloto", "poloto",
"pretty_assertions", "pretty_assertions",
"pypi-types",
"resvg", "resvg",
"rustc-hash", "rustc-hash",
"schemars", "schemars",

View File

@ -1,3 +1,5 @@
use std::str::FromStr;
use bench::criterion::black_box; use bench::criterion::black_box;
use bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion}; use bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion};
use distribution_types::Requirement; use distribution_types::Requirement;
@ -15,9 +17,9 @@ fn resolve_warm_jupyter(c: &mut Criterion<WallTime>) {
let cache = &Cache::from_path("../../.cache").unwrap().init().unwrap(); let cache = &Cache::from_path("../../.cache").unwrap().init().unwrap();
let venv = PythonEnvironment::from_virtualenv(cache).unwrap(); let venv = PythonEnvironment::from_virtualenv(cache).unwrap();
let client = &RegistryClientBuilder::new(cache.clone()).build(); let client = &RegistryClientBuilder::new(cache.clone()).build();
let manifest = &Manifest::simple(vec![ let manifest = &Manifest::simple(vec![Requirement::from(
Requirement::from_pep508("jupyter".parse().unwrap()).unwrap() pep508_rs::Requirement::from_str("jupyter").unwrap(),
]); )]);
let run = || { let run = || {
runtime runtime
@ -45,13 +47,10 @@ fn resolve_warm_airflow(c: &mut Criterion<WallTime>) {
let venv = PythonEnvironment::from_virtualenv(cache).unwrap(); let venv = PythonEnvironment::from_virtualenv(cache).unwrap();
let client = &RegistryClientBuilder::new(cache.clone()).build(); let client = &RegistryClientBuilder::new(cache.clone()).build();
let manifest = &Manifest::simple(vec![ let manifest = &Manifest::simple(vec![
Requirement::from_pep508("apache-airflow[all]".parse().unwrap()).unwrap(), Requirement::from(pep508_rs::Requirement::from_str("apache-airflow[all]").unwrap()),
Requirement::from_pep508( Requirement::from(
"apache-airflow-providers-apache-beam>3.0.0" pep508_rs::Requirement::from_str("apache-airflow-providers-apache-beam>3.0.0").unwrap(),
.parse() ),
.unwrap(),
)
.unwrap(),
]); ]);
let run = || { let run = || {
@ -73,10 +72,10 @@ criterion_main!(uv);
mod resolver { mod resolver {
use anyhow::Result; use anyhow::Result;
use install_wheel_rs::linker::LinkMode;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use distribution_types::IndexLocations; use distribution_types::IndexLocations;
use install_wheel_rs::linker::LinkMode;
use pep508_rs::{MarkerEnvironment, MarkerEnvironmentBuilder}; use pep508_rs::{MarkerEnvironment, MarkerEnvironmentBuilder};
use platform_tags::{Arch, Os, Platform, Tags}; use platform_tags::{Arch, Os, Platform, Tags};
use uv_cache::Cache; use uv_cache::Cache;

View File

@ -25,7 +25,6 @@ uv-normalize = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
fs-err = { workspace = true } fs-err = { workspace = true }
git2 = { workspace = true }
indexmap = { workspace = true } indexmap = { workspace = true }
itertools = { workspace = true } itertools = { workspace = true }
once_cell = { workspace = true } once_cell = { workspace = true }

View File

@ -105,9 +105,9 @@ impl std::fmt::Display for DirectSourceUrl<'_> {
pub struct GitSourceUrl<'a> { pub struct GitSourceUrl<'a> {
/// The URL with the revision and subdirectory fragment. /// The URL with the revision and subdirectory fragment.
pub url: &'a VerbatimUrl, pub url: &'a VerbatimUrl,
pub git: &'a GitUrl,
/// The URL without the revision and subdirectory fragment. /// The URL without the revision and subdirectory fragment.
pub git: Cow<'a, GitUrl>, pub subdirectory: Option<&'a Path>,
pub subdirectory: Option<Cow<'a, Path>>,
} }
impl std::fmt::Display for GitSourceUrl<'_> { impl std::fmt::Display for GitSourceUrl<'_> {
@ -120,8 +120,8 @@ impl<'a> From<&'a GitSourceDist> for GitSourceUrl<'a> {
fn from(dist: &'a GitSourceDist) -> Self { fn from(dist: &'a GitSourceDist) -> Self {
Self { Self {
url: &dist.url, url: &dist.url,
git: Cow::Borrowed(&dist.git), git: &dist.git,
subdirectory: dist.subdirectory.as_deref().map(Cow::Borrowed), subdirectory: dist.subdirectory.as_deref(),
} }
} }
} }

View File

@ -4,12 +4,12 @@ use anyhow::{anyhow, Result};
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use pep508_rs::VerbatimUrl; use pep508_rs::VerbatimUrl;
use pypi_types::HashDigest; use pypi_types::{HashDigest, ParsedPathUrl};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use crate::{ use crate::{
BuiltDist, Dist, DistributionMetadata, Hashed, InstalledMetadata, InstalledVersion, Name, BuiltDist, Dist, DistributionMetadata, Hashed, InstalledMetadata, InstalledVersion, Name,
ParsedPathUrl, ParsedUrl, SourceDist, VersionOrUrlRef, ParsedUrl, SourceDist, VersionOrUrlRef,
}; };
/// A built distribution (wheel) that exists in the local cache. /// A built distribution (wheel) that exists in the local cache.

View File

@ -42,6 +42,7 @@ use url::Url;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use pep440_rs::Version; use pep440_rs::Version;
use pep508_rs::{Pep508Url, VerbatimUrl}; use pep508_rs::{Pep508Url, VerbatimUrl};
use pypi_types::{ParsedUrl, VerbatimParsedUrl};
use uv_git::GitUrl; use uv_git::GitUrl;
use uv_normalize::PackageName; use uv_normalize::PackageName;
@ -57,7 +58,6 @@ pub use crate::hash::*;
pub use crate::id::*; pub use crate::id::*;
pub use crate::index_url::*; pub use crate::index_url::*;
pub use crate::installed::*; pub use crate::installed::*;
pub use crate::parsed_url::*;
pub use crate::prioritized_distribution::*; pub use crate::prioritized_distribution::*;
pub use crate::requirement::*; pub use crate::requirement::*;
pub use crate::resolution::*; pub use crate::resolution::*;
@ -77,7 +77,6 @@ mod hash;
mod id; mod id;
mod index_url; mod index_url;
mod installed; mod installed;
mod parsed_url;
mod prioritized_distribution; mod prioritized_distribution;
mod requirement; mod requirement;
mod resolution; mod resolution;

View File

@ -9,7 +9,7 @@ use pep508_rs::{MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, V
use uv_git::{GitReference, GitSha}; use uv_git::{GitReference, GitSha};
use uv_normalize::{ExtraName, PackageName}; use uv_normalize::{ExtraName, PackageName};
use crate::{ParsedUrl, ParsedUrlError}; use crate::{ParsedUrl, VerbatimParsedUrl};
/// The requirements of a distribution, an extension over PEP 508's requirements. /// The requirements of a distribution, an extension over PEP 508's requirements.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
@ -44,9 +44,11 @@ impl Requirement {
true true
} }
} }
}
impl From<pep508_rs::Requirement<VerbatimParsedUrl>> for Requirement {
/// Convert a [`pep508_rs::Requirement`] to a [`Requirement`]. /// Convert a [`pep508_rs::Requirement`] to a [`Requirement`].
pub fn from_pep508(requirement: pep508_rs::Requirement) -> Result<Self, Box<ParsedUrlError>> { fn from(requirement: pep508_rs::Requirement<VerbatimParsedUrl>) -> Self {
let source = match requirement.version_or_url { let source = match requirement.version_or_url {
None => RequirementSource::Registry { None => RequirementSource::Registry {
specifier: VersionSpecifiers::empty(), specifier: VersionSpecifiers::empty(),
@ -58,17 +60,16 @@ impl Requirement {
index: None, index: None,
}, },
Some(VersionOrUrl::Url(url)) => { Some(VersionOrUrl::Url(url)) => {
let direct_url = ParsedUrl::try_from(url.to_url())?; RequirementSource::from_parsed_url(url.parsed_url, url.verbatim)
RequirementSource::from_parsed_url(direct_url, url)
} }
}; };
Ok(Requirement { Requirement {
name: requirement.name, name: requirement.name,
extras: requirement.extras, extras: requirement.extras,
marker: requirement.marker, marker: requirement.marker,
source, source,
origin: requirement.origin, origin: requirement.origin,
}) }
} }
} }

View File

@ -4,7 +4,7 @@ use std::fmt::{Display, Formatter};
use pep508_rs::{MarkerEnvironment, UnnamedRequirement}; use pep508_rs::{MarkerEnvironment, UnnamedRequirement};
use uv_normalize::ExtraName; use uv_normalize::ExtraName;
use crate::{ParsedUrl, ParsedUrlError, Requirement, RequirementSource}; use crate::{Requirement, RequirementSource, VerbatimParsedUrl};
/// An [`UnresolvedRequirement`] with additional metadata from `requirements.txt`, currently only /// An [`UnresolvedRequirement`] with additional metadata from `requirements.txt`, currently only
/// hashes but in the future also editable and similar information. /// hashes but in the future also editable and similar information.
@ -29,7 +29,7 @@ pub enum UnresolvedRequirement {
/// `tool.uv.sources`. /// `tool.uv.sources`.
Named(Requirement), Named(Requirement),
/// A PEP 508-like, direct URL dependency specifier. /// A PEP 508-like, direct URL dependency specifier.
Unnamed(UnnamedRequirement), Unnamed(UnnamedRequirement<VerbatimParsedUrl>),
} }
impl Display for UnresolvedRequirement { impl Display for UnresolvedRequirement {
@ -64,17 +64,13 @@ impl UnresolvedRequirement {
} }
/// Return the version specifier or URL for the requirement. /// Return the version specifier or URL for the requirement.
pub fn source(&self) -> Result<Cow<'_, RequirementSource>, Box<ParsedUrlError>> { pub fn source(&self) -> Cow<'_, RequirementSource> {
// TODO(konsti): This is a bad place to raise errors, we should have parsed the url earlier.
match self { match self {
Self::Named(requirement) => Ok(Cow::Borrowed(&requirement.source)), Self::Named(requirement) => Cow::Borrowed(&requirement.source),
Self::Unnamed(requirement) => { Self::Unnamed(requirement) => Cow::Owned(RequirementSource::from_parsed_url(
let parsed_url = ParsedUrl::try_from(requirement.url.to_url())?; requirement.url.parsed_url.clone(),
Ok(Cow::Owned(RequirementSource::from_parsed_url( requirement.url.verbatim.clone(),
parsed_url, )),
requirement.url.clone(),
)))
}
} }
} }
} }

View File

@ -16,7 +16,6 @@
#![warn(missing_docs)] #![warn(missing_docs)]
use cursor::Cursor;
#[cfg(feature = "pyo3")] #[cfg(feature = "pyo3")]
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
use std::collections::HashSet; use std::collections::HashSet;
@ -39,18 +38,18 @@ use thiserror::Error;
use unicode_width::UnicodeWidthChar; use unicode_width::UnicodeWidthChar;
use url::Url; use url::Url;
use cursor::Cursor;
pub use marker::{ pub use marker::{
ExtraOperator, MarkerEnvironment, MarkerEnvironmentBuilder, MarkerExpression, MarkerOperator, ExtraOperator, MarkerEnvironment, MarkerEnvironmentBuilder, MarkerExpression, MarkerOperator,
MarkerTree, MarkerValue, MarkerValueString, MarkerValueVersion, MarkerWarningKind, MarkerTree, MarkerValue, MarkerValueString, MarkerValueVersion, MarkerWarningKind,
StringVersion, StringVersion,
}; };
pub use origin::RequirementOrigin;
#[cfg(feature = "pyo3")] #[cfg(feature = "pyo3")]
use pep440_rs::PyVersion; use pep440_rs::PyVersion;
use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers}; use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers};
#[cfg(feature = "non-pep508-extensions")] #[cfg(feature = "non-pep508-extensions")]
pub use unnamed::UnnamedRequirement; pub use unnamed::{UnnamedRequirement, UnnamedRequirementUrl};
// Parity with the crates.io version of pep508_rs
pub use origin::RequirementOrigin;
pub use uv_normalize::{ExtraName, InvalidNameError, PackageName}; pub use uv_normalize::{ExtraName, InvalidNameError, PackageName};
pub use verbatim_url::{ pub use verbatim_url::{
expand_env_vars, split_scheme, strip_host, Scheme, VerbatimUrl, VerbatimUrlError, expand_env_vars, split_scheme, strip_host, Scheme, VerbatimUrl, VerbatimUrlError,
@ -123,7 +122,7 @@ impl<T: Pep508Url> Display for Pep508Error<T> {
} }
} }
/// We need this to allow e.g. anyhow's `.context()` /// We need this to allow anyhow's `.context()` and `AsDynError`.
impl<E: Error + Debug, T: Pep508Url<Err = E>> std::error::Error for Pep508Error<T> {} impl<E: Error + Debug, T: Pep508Url<Err = E>> std::error::Error for Pep508Error<T> {}
#[cfg(feature = "pyo3")] #[cfg(feature = "pyo3")]
@ -155,17 +154,6 @@ pub struct Requirement<T: Pep508Url = VerbatimUrl> {
pub origin: Option<RequirementOrigin>, pub origin: Option<RequirementOrigin>,
} }
impl Requirement {
/// Set the source file containing the requirement.
#[must_use]
pub fn with_origin(self, origin: RequirementOrigin) -> Self {
Self {
origin: Some(origin),
..self
}
}
}
impl<T: Pep508Url + Display> Display for Requirement<T> { impl<T: Pep508Url + Display> Display for Requirement<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.name)?; write!(f, "{}", self.name)?;
@ -453,10 +441,19 @@ impl<T: Pep508Url> Requirement<T> {
..self ..self
} }
} }
/// Set the source file containing the requirement.
#[must_use]
pub fn with_origin(self, origin: RequirementOrigin) -> Self {
Self {
origin: Some(origin),
..self
}
}
} }
/// Type to parse URLs from `name @ <url>` into. Defaults to [`url::Url`]. /// Type to parse URLs from `name @ <url>` into. Defaults to [`url::Url`].
pub trait Pep508Url: Clone + Display + Debug { pub trait Pep508Url: Display + Debug + Sized {
/// String to URL parsing error /// String to URL parsing error
type Err: Error + Debug; type Err: Error + Debug;
@ -1136,7 +1133,7 @@ mod tests {
#[cfg(feature = "non-pep508-extensions")] #[cfg(feature = "non-pep508-extensions")]
fn parse_unnamed_err(input: &str) -> String { fn parse_unnamed_err(input: &str) -> String {
crate::UnnamedRequirement::from_str(input) crate::UnnamedRequirement::<VerbatimUrl>::from_str(input)
.unwrap_err() .unwrap_err()
.to_string() .to_string()
} }
@ -1256,7 +1253,7 @@ mod tests {
#[test] #[test]
#[cfg(feature = "non-pep508-extensions")] #[cfg(feature = "non-pep508-extensions")]
fn direct_url_no_extras() { fn direct_url_no_extras() {
let numpy = crate::UnnamedRequirement::from_str("https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl").unwrap(); let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str("https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl").unwrap();
assert_eq!(numpy.url.to_string(), "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl"); assert_eq!(numpy.url.to_string(), "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl");
assert_eq!(numpy.extras, vec![]); assert_eq!(numpy.extras, vec![]);
} }
@ -1264,9 +1261,10 @@ mod tests {
#[test] #[test]
#[cfg(all(unix, feature = "non-pep508-extensions"))] #[cfg(all(unix, feature = "non-pep508-extensions"))]
fn direct_url_extras() { fn direct_url_extras() {
let numpy = let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
crate::UnnamedRequirement::from_str("/path/to/numpy-1.26.4-cp312-cp312-win32.whl[dev]") "/path/to/numpy-1.26.4-cp312-cp312-win32.whl[dev]",
.unwrap(); )
.unwrap();
assert_eq!( assert_eq!(
numpy.url.to_string(), numpy.url.to_string(),
"file:///path/to/numpy-1.26.4-cp312-cp312-win32.whl" "file:///path/to/numpy-1.26.4-cp312-cp312-win32.whl"
@ -1277,7 +1275,7 @@ mod tests {
#[test] #[test]
#[cfg(all(windows, feature = "non-pep508-extensions"))] #[cfg(all(windows, feature = "non-pep508-extensions"))]
fn direct_url_extras() { fn direct_url_extras() {
let numpy = crate::UnnamedRequirement::from_str( let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
"C:\\path\\to\\numpy-1.26.4-cp312-cp312-win32.whl[dev]", "C:\\path\\to\\numpy-1.26.4-cp312-cp312-win32.whl[dev]",
) )
.unwrap(); .unwrap();
@ -1459,7 +1457,8 @@ mod tests {
fn test_marker_parsing() { fn test_marker_parsing() {
let marker = r#"python_version == "2.7" and (sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython'))"#; let marker = r#"python_version == "2.7" and (sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython'))"#;
let actual = let actual =
parse_markers_cursor::<Url>(&mut Cursor::new(marker), &mut TracingReporter).unwrap(); parse_markers_cursor::<VerbatimUrl>(&mut Cursor::new(marker), &mut TracingReporter)
.unwrap();
let expected = MarkerTree::And(vec![ let expected = MarkerTree::And(vec![
MarkerTree::Expression(MarkerExpression::Version { MarkerTree::Expression(MarkerExpression::Version {
key: MarkerValueVersion::PythonVersion, key: MarkerValueVersion::PythonVersion,

View File

@ -1550,6 +1550,11 @@ impl FromStr for MarkerTree {
} }
impl MarkerTree { impl MarkerTree {
/// Like [`FromStr::from_str`], but the caller chooses the return type generic.
pub fn parse_str<T: Pep508Url>(markers: &str) -> Result<Self, Pep508Error<T>> {
parse_markers(markers, &mut TracingReporter)
}
/// Parse a [`MarkerTree`] from a string with the given reporter. /// Parse a [`MarkerTree`] from a string with the given reporter.
pub fn parse_reporter( pub fn parse_reporter(
markers: &str, markers: &str,

View File

@ -1,28 +1,74 @@
use std::fmt::{Display, Formatter}; use std::fmt::{Debug, Display, Formatter};
use std::hash::Hash;
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use uv_fs::normalize_url_path; use uv_fs::normalize_url_path;
use uv_normalize::ExtraName; use uv_normalize::ExtraName;
use crate::marker::parse_markers_cursor; use crate::marker::parse_markers_cursor;
use crate::{ use crate::{
expand_env_vars, parse_extras_cursor, split_extras, split_scheme, strip_host, Cursor, expand_env_vars, parse_extras_cursor, split_extras, split_scheme, strip_host, Cursor,
MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Reporter, RequirementOrigin, MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError, RequirementOrigin, Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError,
}; };
/// An extension over [`Pep508Url`] that also supports parsing unnamed requirements, namely paths.
///
/// The error type is fixed to the same as the [`Pep508Url`] impl error.
pub trait UnnamedRequirementUrl: Pep508Url {
/// Parse a URL from a relative or absolute path.
fn parse_path(path: impl AsRef<Path>, working_dir: impl AsRef<Path>)
-> Result<Self, Self::Err>;
/// Parse a URL from an absolute path.
fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err>;
/// Parse a URL from a string.
fn parse_unnamed_url(given: impl AsRef<str>) -> Result<Self, Self::Err>;
/// Set the verbatim representation of the URL.
#[must_use]
fn with_given(self, given: impl Into<String>) -> Self;
/// Return the original string as given by the user, if available.
fn given(&self) -> Option<&str>;
}
impl UnnamedRequirementUrl for VerbatimUrl {
fn parse_path(
path: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
) -> Result<Self, VerbatimUrlError> {
Self::parse_path(path, working_dir)
}
fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err> {
Self::parse_absolute_path(path)
}
fn parse_unnamed_url(given: impl AsRef<str>) -> Result<Self, Self::Err> {
Ok(Self::parse_url(given)?)
}
fn with_given(self, given: impl Into<String>) -> Self {
self.with_given(given)
}
fn given(&self) -> Option<&str> {
self.given()
}
}
/// A PEP 508-like, direct URL dependency specifier without a package name. /// A PEP 508-like, direct URL dependency specifier without a package name.
/// ///
/// In a `requirements.txt` file, the name of the package is optional for direct URL /// In a `requirements.txt` file, the name of the package is optional for direct URL
/// dependencies. This isn't compliant with PEP 508, but is common in `requirements.txt`, which /// dependencies. This isn't compliant with PEP 508, but is common in `requirements.txt`, which
/// is implementation-defined. /// is implementation-defined.
#[derive(Hash, Debug, Clone, Eq, PartialEq)] #[derive(Hash, Debug, Clone, Eq, PartialEq)]
pub struct UnnamedRequirement { pub struct UnnamedRequirement<Url: UnnamedRequirementUrl = VerbatimUrl> {
/// The direct URL that defines the version specifier. /// The direct URL that defines the version specifier.
pub url: VerbatimUrl, pub url: Url,
/// The list of extras such as `security`, `tests` in /// The list of extras such as `security`, `tests` in
/// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`. /// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`.
pub extras: Vec<ExtraName>, pub extras: Vec<ExtraName>,
@ -34,7 +80,7 @@ pub struct UnnamedRequirement {
pub origin: Option<RequirementOrigin>, pub origin: Option<RequirementOrigin>,
} }
impl UnnamedRequirement { impl<Url: UnnamedRequirementUrl> UnnamedRequirement<Url> {
/// Returns whether the markers apply for the given environment /// Returns whether the markers apply for the given environment
pub fn evaluate_markers(&self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool { pub fn evaluate_markers(&self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool {
self.evaluate_optional_environment(Some(env), extras) self.evaluate_optional_environment(Some(env), extras)
@ -61,9 +107,22 @@ impl UnnamedRequirement {
..self ..self
} }
} }
/// Parse a PEP 508-like direct URL requirement without a package name.
pub fn parse(
input: &str,
working_dir: impl AsRef<Path>,
reporter: &mut impl Reporter,
) -> Result<Self, Pep508Error<Url>> {
parse_unnamed_requirement(
&mut Cursor::new(input),
Some(working_dir.as_ref()),
reporter,
)
}
} }
impl Display for UnnamedRequirement { impl<Url: UnnamedRequirementUrl> Display for UnnamedRequirement<Url> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.url)?; write!(f, "{}", self.url)?;
if !self.extras.is_empty() { if !self.extras.is_empty() {
@ -84,29 +143,8 @@ impl Display for UnnamedRequirement {
} }
} }
/// <https://github.com/serde-rs/serde/issues/908#issuecomment-298027413> impl<Url: UnnamedRequirementUrl> FromStr for UnnamedRequirement<Url> {
impl<'de> Deserialize<'de> for UnnamedRequirement { type Err = Pep508Error<Url>;
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
FromStr::from_str(&s).map_err(de::Error::custom)
}
}
/// <https://github.com/serde-rs/serde/issues/1316#issue-332908452>
impl Serialize for UnnamedRequirement {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.collect_str(self)
}
}
impl FromStr for UnnamedRequirement {
type Err = Pep508Error<VerbatimUrl>;
/// Parse a PEP 508-like direct URL requirement without a package name. /// Parse a PEP 508-like direct URL requirement without a package name.
fn from_str(input: &str) -> Result<Self, Self::Err> { fn from_str(input: &str) -> Result<Self, Self::Err> {
@ -114,33 +152,18 @@ impl FromStr for UnnamedRequirement {
} }
} }
impl UnnamedRequirement {
/// Parse a PEP 508-like direct URL requirement without a package name.
pub fn parse(
input: &str,
working_dir: impl AsRef<Path>,
reporter: &mut impl Reporter,
) -> Result<Self, Pep508Error<VerbatimUrl>> {
parse_unnamed_requirement(
&mut Cursor::new(input),
Some(working_dir.as_ref()),
reporter,
)
}
}
/// Parse a PEP 508-like direct URL specifier without a package name. /// Parse a PEP 508-like direct URL specifier without a package name.
/// ///
/// Unlike pip, we allow extras on URLs and paths. /// Unlike pip, we allow extras on URLs and paths.
fn parse_unnamed_requirement( fn parse_unnamed_requirement<Url: UnnamedRequirementUrl>(
cursor: &mut Cursor, cursor: &mut Cursor,
working_dir: Option<&Path>, working_dir: Option<&Path>,
reporter: &mut impl Reporter, reporter: &mut impl Reporter,
) -> Result<UnnamedRequirement, Pep508Error<VerbatimUrl>> { ) -> Result<UnnamedRequirement<Url>, Pep508Error<Url>> {
cursor.eat_whitespace(); cursor.eat_whitespace();
// Parse the URL itself, along with any extras. // Parse the URL itself, along with any extras.
let (url, extras) = parse_unnamed_url(cursor, working_dir)?; let (url, extras) = parse_unnamed_url::<Url>(cursor, working_dir)?;
let requirement_end = cursor.pos(); let requirement_end = cursor.pos();
// wsp* // wsp*
@ -191,13 +214,13 @@ fn parse_unnamed_requirement(
/// Create a `VerbatimUrl` to represent the requirement, and extracts any extras at the end of the /// Create a `VerbatimUrl` to represent the requirement, and extracts any extras at the end of the
/// URL, to comply with the non-PEP 508 extensions. /// URL, to comply with the non-PEP 508 extensions.
fn preprocess_unnamed_url( fn preprocess_unnamed_url<Url: UnnamedRequirementUrl>(
url: &str, url: &str,
#[cfg_attr(not(feature = "non-pep508-extensions"), allow(unused))] working_dir: Option<&Path>, #[cfg_attr(not(feature = "non-pep508-extensions"), allow(unused))] working_dir: Option<&Path>,
cursor: &Cursor, cursor: &Cursor,
start: usize, start: usize,
len: usize, len: usize,
) -> Result<(VerbatimUrl, Vec<ExtraName>), Pep508Error<VerbatimUrl>> { ) -> Result<(Url, Vec<ExtraName>), Pep508Error<Url>> {
// Split extras _before_ expanding the URL. We assume that the extras are not environment // Split extras _before_ expanding the URL. We assume that the extras are not environment
// variables. If we parsed the extras after expanding the URL, then the verbatim representation // variables. If we parsed the extras after expanding the URL, then the verbatim representation
// of the URL itself would be ambiguous, since it would consist of the environment variable, // of the URL itself would be ambiguous, since it would consist of the environment variable,
@ -235,9 +258,9 @@ fn preprocess_unnamed_url(
#[cfg(feature = "non-pep508-extensions")] #[cfg(feature = "non-pep508-extensions")]
if let Some(working_dir) = working_dir { if let Some(working_dir) = working_dir {
let url = VerbatimUrl::parse_path(path.as_ref(), working_dir) let url = Url::parse_path(path.as_ref(), working_dir)
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::<VerbatimUrl>::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
len, len,
input: cursor.to_string(), input: cursor.to_string(),
@ -246,9 +269,9 @@ fn preprocess_unnamed_url(
return Ok((url, extras)); return Ok((url, extras));
} }
let url = VerbatimUrl::parse_absolute_path(path.as_ref()) let url = Url::parse_absolute_path(path.as_ref())
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::<VerbatimUrl>::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
len, len,
input: cursor.to_string(), input: cursor.to_string(),
@ -259,11 +282,9 @@ fn preprocess_unnamed_url(
// Ex) `https://download.pytorch.org/whl/torch_stable.html` // Ex) `https://download.pytorch.org/whl/torch_stable.html`
Some(_) => { Some(_) => {
// Ex) `https://download.pytorch.org/whl/torch_stable.html` // Ex) `https://download.pytorch.org/whl/torch_stable.html`
let url = VerbatimUrl::parse_url(expanded.as_ref()) let url = Url::parse_unnamed_url(expanded.as_ref())
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::<VerbatimUrl>::UrlError(VerbatimUrlError::Url( message: Pep508ErrorSource::UrlError(err),
err,
)),
start, start,
len, len,
input: cursor.to_string(), input: cursor.to_string(),
@ -275,9 +296,9 @@ fn preprocess_unnamed_url(
// Ex) `C:\Users\ferris\wheel-0.42.0.tar.gz` // Ex) `C:\Users\ferris\wheel-0.42.0.tar.gz`
_ => { _ => {
if let Some(working_dir) = working_dir { if let Some(working_dir) = working_dir {
let url = VerbatimUrl::parse_path(expanded.as_ref(), working_dir) let url = Url::parse_path(expanded.as_ref(), working_dir)
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::<VerbatimUrl>::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
len, len,
input: cursor.to_string(), input: cursor.to_string(),
@ -286,7 +307,7 @@ fn preprocess_unnamed_url(
return Ok((url, extras)); return Ok((url, extras));
} }
let url = VerbatimUrl::parse_absolute_path(expanded.as_ref()) let url = Url::parse_absolute_path(expanded.as_ref())
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
@ -300,9 +321,9 @@ fn preprocess_unnamed_url(
} else { } else {
// Ex) `../editable/` // Ex) `../editable/`
if let Some(working_dir) = working_dir { if let Some(working_dir) = working_dir {
let url = VerbatimUrl::parse_path(expanded.as_ref(), working_dir) let url = Url::parse_path(expanded.as_ref(), working_dir)
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::<VerbatimUrl>::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
len, len,
input: cursor.to_string(), input: cursor.to_string(),
@ -311,7 +332,7 @@ fn preprocess_unnamed_url(
return Ok((url, extras)); return Ok((url, extras));
} }
let url = VerbatimUrl::parse_absolute_path(expanded.as_ref()) let url = Url::parse_absolute_path(expanded.as_ref())
.map_err(|err| Pep508Error { .map_err(|err| Pep508Error {
message: Pep508ErrorSource::UrlError(err), message: Pep508ErrorSource::UrlError(err),
start, start,
@ -329,10 +350,10 @@ fn preprocess_unnamed_url(
/// For example: /// For example:
/// - `https://download.pytorch.org/whl/torch_stable.html[dev]` /// - `https://download.pytorch.org/whl/torch_stable.html[dev]`
/// - `../editable[dev]` /// - `../editable[dev]`
fn parse_unnamed_url( fn parse_unnamed_url<Url: UnnamedRequirementUrl>(
cursor: &mut Cursor, cursor: &mut Cursor,
working_dir: Option<&Path>, working_dir: Option<&Path>,
) -> Result<(VerbatimUrl, Vec<ExtraName>), Pep508Error<VerbatimUrl>> { ) -> Result<(Url, Vec<ExtraName>), Pep508Error<Url>> {
// wsp* // wsp*
cursor.eat_whitespace(); cursor.eat_whitespace();
// <URI_reference> // <URI_reference>

View File

@ -16,8 +16,11 @@ workspace = true
pep440_rs = { workspace = true } pep440_rs = { workspace = true }
pep508_rs = { workspace = true } pep508_rs = { workspace = true }
uv-normalize = { workspace = true } uv-normalize = { workspace = true }
uv-git = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true, features = ["serde"] } chrono = { workspace = true, features = ["serde"] }
git2 = { workspace = true }
indexmap = { workspace = true, features = ["serde"] } indexmap = { workspace = true, features = ["serde"] }
mailparse = { workspace = true } mailparse = { workspace = true }
once_cell = { workspace = true } once_cell = { workspace = true }

View File

@ -7,7 +7,9 @@ use serde::{de, Deserialize, Deserializer, Serialize};
use tracing::warn; use tracing::warn;
use pep440_rs::{VersionSpecifiers, VersionSpecifiersParseError}; use pep440_rs::{VersionSpecifiers, VersionSpecifiersParseError};
use pep508_rs::{Pep508Error, Pep508Url, Requirement, VerbatimUrl}; use pep508_rs::{Pep508Error, Pep508Url, Requirement};
use crate::VerbatimParsedUrl;
/// Ex) `>=7.2.0<8.0.0` /// Ex) `>=7.2.0<8.0.0`
static MISSING_COMMA: Lazy<Regex> = Lazy::new(|| Regex::new(r"(\d)([<>=~^!])").unwrap()); static MISSING_COMMA: Lazy<Regex> = Lazy::new(|| Regex::new(r"(\d)([<>=~^!])").unwrap());
@ -114,7 +116,7 @@ fn parse_with_fixups<Err, T: FromStr<Err = Err>>(input: &str, type_name: &str) -
/// Like [`Requirement`], but attempts to correct some common errors in user-provided requirements. /// Like [`Requirement`], but attempts to correct some common errors in user-provided requirements.
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct LenientRequirement<T: Pep508Url = VerbatimUrl>(Requirement<T>); pub struct LenientRequirement<T: Pep508Url = VerbatimParsedUrl>(Requirement<T>);
impl<T: Pep508Url> FromStr for LenientRequirement<T> { impl<T: Pep508Url> FromStr for LenientRequirement<T> {
type Err = Pep508Error<T>; type Err = Pep508Error<T>;

View File

@ -2,6 +2,7 @@ pub use base_url::*;
pub use direct_url::*; pub use direct_url::*;
pub use lenient_requirement::*; pub use lenient_requirement::*;
pub use metadata::*; pub use metadata::*;
pub use parsed_url::*;
pub use scheme::*; pub use scheme::*;
pub use simple_json::*; pub use simple_json::*;
@ -9,5 +10,6 @@ mod base_url;
mod direct_url; mod direct_url;
mod lenient_requirement; mod lenient_requirement;
mod metadata; mod metadata;
mod parsed_url;
mod scheme; mod scheme;
mod simple_json; mod simple_json;

View File

@ -9,11 +9,11 @@ use thiserror::Error;
use tracing::warn; use tracing::warn;
use pep440_rs::{Version, VersionParseError, VersionSpecifiers, VersionSpecifiersParseError}; use pep440_rs::{Version, VersionParseError, VersionSpecifiers, VersionSpecifiersParseError};
use pep508_rs::{Pep508Error, Requirement, VerbatimUrl}; use pep508_rs::{Pep508Error, Requirement};
use uv_normalize::{ExtraName, InvalidNameError, PackageName}; use uv_normalize::{ExtraName, InvalidNameError, PackageName};
use crate::lenient_requirement::LenientRequirement; use crate::lenient_requirement::LenientRequirement;
use crate::LenientVersionSpecifiers; use crate::{LenientVersionSpecifiers, VerbatimParsedUrl};
/// Python Package Metadata 2.3 as specified in /// Python Package Metadata 2.3 as specified in
/// <https://packaging.python.org/specifications/core-metadata/>. /// <https://packaging.python.org/specifications/core-metadata/>.
@ -29,7 +29,7 @@ pub struct Metadata23 {
pub name: PackageName, pub name: PackageName,
pub version: Version, pub version: Version,
// Optional fields // Optional fields
pub requires_dist: Vec<Requirement<VerbatimUrl>>, pub requires_dist: Vec<Requirement<VerbatimParsedUrl>>,
pub requires_python: Option<VersionSpecifiers>, pub requires_python: Option<VersionSpecifiers>,
pub provides_extras: Vec<ExtraName>, pub provides_extras: Vec<ExtraName>,
} }
@ -50,7 +50,7 @@ pub enum MetadataError {
#[error(transparent)] #[error(transparent)]
Pep440Error(#[from] VersionSpecifiersParseError), Pep440Error(#[from] VersionSpecifiersParseError),
#[error(transparent)] #[error(transparent)]
Pep508Error(#[from] Pep508Error<VerbatimUrl>), Pep508Error(#[from] Box<Pep508Error<VerbatimParsedUrl>>),
#[error(transparent)] #[error(transparent)]
InvalidName(#[from] InvalidNameError), InvalidName(#[from] InvalidNameError),
#[error("Invalid `Metadata-Version` field: {0}")] #[error("Invalid `Metadata-Version` field: {0}")]
@ -61,6 +61,12 @@ pub enum MetadataError {
DynamicField(&'static str), DynamicField(&'static str),
} }
impl From<Pep508Error<VerbatimParsedUrl>> for MetadataError {
fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self {
Self::Pep508Error(Box::new(error))
}
}
/// From <https://github.com/PyO3/python-pkginfo-rs/blob/d719988323a0cfea86d4737116d7917f30e819e2/src/metadata.rs#LL78C2-L91C26> /// From <https://github.com/PyO3/python-pkginfo-rs/blob/d719988323a0cfea86d4737116d7917f30e819e2/src/metadata.rs#LL78C2-L91C26>
impl Metadata23 { impl Metadata23 {
/// Parse the [`Metadata23`] from a `METADATA` file, as included in a built distribution (wheel). /// Parse the [`Metadata23`] from a `METADATA` file, as included in a built distribution (wheel).

View File

@ -1,12 +1,14 @@
use std::path::PathBuf; use std::fmt::{Display, Formatter};
use std::path::{Path, PathBuf};
use anyhow::{Error, Result};
use thiserror::Error; use thiserror::Error;
use url::Url; use url::{ParseError, Url};
use pep508_rs::VerbatimUrl; use pep508_rs::{Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError};
use uv_git::{GitSha, GitUrl}; use uv_git::{GitSha, GitUrl};
use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind};
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum ParsedUrlError { pub enum ParsedUrlError {
#[error("Unsupported URL prefix `{prefix}` in URL: `{url}` ({message})")] #[error("Unsupported URL prefix `{prefix}` in URL: `{url}` ({message})")]
@ -20,7 +22,9 @@ pub enum ParsedUrlError {
#[error("Failed to parse Git reference from URL: `{0}`")] #[error("Failed to parse Git reference from URL: `{0}`")]
GitShaParse(Url, #[source] git2::Error), GitShaParse(Url, #[source] git2::Error),
#[error("Not a valid URL: `{0}`")] #[error("Not a valid URL: `{0}`")]
UrlParse(String, #[source] url::ParseError), UrlParse(String, #[source] ParseError),
#[error(transparent)]
VerbatimUrl(#[from] VerbatimUrlError),
} }
#[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord)] #[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord)]
@ -29,6 +33,105 @@ pub struct VerbatimParsedUrl {
pub verbatim: VerbatimUrl, pub verbatim: VerbatimUrl,
} }
impl Pep508Url for VerbatimParsedUrl {
    type Err = ParsedUrlError;

    /// Parse the string as a verbatim URL, then eagerly split it into its
    /// structured [`ParsedUrl`] parts, rejecting unsupported URLs up front.
    fn parse_url(url: &str, working_dir: Option<&Path>) -> Result<Self, Self::Err> {
        let verbatim_url = <VerbatimUrl as Pep508Url>::parse_url(url, working_dir)?;
        let parsed_url = ParsedUrl::try_from(verbatim_url.to_url())?;
        Ok(Self {
            parsed_url,
            verbatim: verbatim_url,
        })
    }
}
impl UnnamedRequirementUrl for VerbatimParsedUrl {
    /// Parse a (possibly relative) filesystem path, resolved against `working_dir`.
    fn parse_path(
        path: impl AsRef<Path>,
        working_dir: impl AsRef<Path>,
    ) -> Result<Self, Self::Err> {
        let verbatim_url = VerbatimUrl::parse_path(&path, &working_dir)?;
        // Anchor the user-provided path at the working directory.
        let resolved = working_dir.as_ref().join(path);
        let path_url = ParsedPathUrl {
            url: verbatim_url.to_url(),
            path: resolved,
            editable: false,
        };
        Ok(Self {
            parsed_url: ParsedUrl::Path(path_url),
            verbatim: verbatim_url,
        })
    }

    /// Parse a path that is already absolute.
    fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err> {
        let verbatim_url = VerbatimUrl::parse_absolute_path(&path)?;
        let path_url = ParsedPathUrl {
            url: verbatim_url.to_url(),
            path: path.as_ref().to_path_buf(),
            editable: false,
        };
        Ok(Self {
            parsed_url: ParsedUrl::Path(path_url),
            verbatim: verbatim_url,
        })
    }

    /// Parse a non-path URL and split it into its structured parts.
    fn parse_unnamed_url(url: impl AsRef<str>) -> Result<Self, Self::Err> {
        let verbatim_url = <VerbatimUrl as UnnamedRequirementUrl>::parse_unnamed_url(&url)?;
        let parsed_url = ParsedUrl::try_from(verbatim_url.to_url())?;
        Ok(Self {
            parsed_url,
            verbatim: verbatim_url,
        })
    }

    /// Attach the original, as-given string to the verbatim URL.
    fn with_given(self, given: impl Into<String>) -> Self {
        let Self {
            parsed_url,
            verbatim,
        } = self;
        Self {
            parsed_url,
            verbatim: verbatim.with_given(given),
        }
    }

    /// The original string as given by the user, if recorded.
    fn given(&self) -> Option<&str> {
        self.verbatim.given()
    }
}
/// Display as the verbatim (user-facing) URL form.
impl Display for VerbatimParsedUrl {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.verbatim)
    }
}
impl TryFrom<VerbatimUrl> for VerbatimParsedUrl {
type Error = ParsedUrlError;
fn try_from(verbatim_url: VerbatimUrl) -> Result<Self, Self::Error> {
let parsed_url = ParsedUrl::try_from(verbatim_url.to_url())?;
Ok(Self {
parsed_url,
verbatim: verbatim_url,
})
}
}
/// Serialize only the verbatim URL; the parsed parts are re-derived on
/// deserialization.
impl serde::ser::Serialize for VerbatimParsedUrl {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        serde::ser::Serialize::serialize(&self.verbatim, serializer)
    }
}
impl<'de> serde::de::Deserialize<'de> for VerbatimParsedUrl {
fn deserialize<D>(deserializer: D) -> Result<VerbatimParsedUrl, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let verbatim_url = VerbatimUrl::deserialize(deserializer)?;
Self::try_from(verbatim_url).map_err(serde::de::Error::custom)
}
}
/// We support three types of URLs for distributions: /// We support three types of URLs for distributions:
/// * The path to a file or directory (`file://`) /// * The path to a file or directory (`file://`)
/// * A Git repository (`git+https://` or `git+ssh://`), optionally with a subdirectory and/or /// * A Git repository (`git+https://` or `git+ssh://`), optionally with a subdirectory and/or
@ -124,7 +227,7 @@ fn get_subdirectory(url: &Url) -> Option<PathBuf> {
} }
/// Return the Git reference of the given URL, if it exists. /// Return the Git reference of the given URL, if it exists.
pub fn git_reference(url: Url) -> Result<Option<GitSha>, Error> { pub fn git_reference(url: Url) -> Result<Option<GitSha>, Box<ParsedUrlError>> {
let ParsedGitUrl { url, .. } = ParsedGitUrl::try_from(url)?; let ParsedGitUrl { url, .. } = ParsedGitUrl::try_from(url)?;
Ok(url.precise()) Ok(url.precise())
} }
@ -172,10 +275,10 @@ impl TryFrom<Url> for ParsedUrl {
} }
} }
impl TryFrom<&ParsedUrl> for pypi_types::DirectUrl { impl TryFrom<&ParsedUrl> for DirectUrl {
type Error = Error; type Error = ParsedUrlError;
fn try_from(value: &ParsedUrl) -> std::result::Result<Self, Self::Error> { fn try_from(value: &ParsedUrl) -> Result<Self, Self::Error> {
match value { match value {
ParsedUrl::Path(value) => Self::try_from(value), ParsedUrl::Path(value) => Self::try_from(value),
ParsedUrl::Git(value) => Self::try_from(value), ParsedUrl::Git(value) => Self::try_from(value),
@ -184,26 +287,26 @@ impl TryFrom<&ParsedUrl> for pypi_types::DirectUrl {
} }
} }
impl TryFrom<&ParsedPathUrl> for pypi_types::DirectUrl { impl TryFrom<&ParsedPathUrl> for DirectUrl {
type Error = Error; type Error = ParsedUrlError;
fn try_from(value: &ParsedPathUrl) -> Result<Self, Self::Error> { fn try_from(value: &ParsedPathUrl) -> Result<Self, Self::Error> {
Ok(Self::LocalDirectory { Ok(Self::LocalDirectory {
url: value.url.to_string(), url: value.url.to_string(),
dir_info: pypi_types::DirInfo { dir_info: DirInfo {
editable: value.editable.then_some(true), editable: value.editable.then_some(true),
}, },
}) })
} }
} }
impl TryFrom<&ParsedArchiveUrl> for pypi_types::DirectUrl { impl TryFrom<&ParsedArchiveUrl> for DirectUrl {
type Error = Error; type Error = ParsedUrlError;
fn try_from(value: &ParsedArchiveUrl) -> Result<Self, Self::Error> { fn try_from(value: &ParsedArchiveUrl) -> Result<Self, Self::Error> {
Ok(Self::ArchiveUrl { Ok(Self::ArchiveUrl {
url: value.url.to_string(), url: value.url.to_string(),
archive_info: pypi_types::ArchiveInfo { archive_info: ArchiveInfo {
hash: None, hash: None,
hashes: None, hashes: None,
}, },
@ -212,14 +315,14 @@ impl TryFrom<&ParsedArchiveUrl> for pypi_types::DirectUrl {
} }
} }
impl TryFrom<&ParsedGitUrl> for pypi_types::DirectUrl { impl TryFrom<&ParsedGitUrl> for DirectUrl {
type Error = Error; type Error = ParsedUrlError;
fn try_from(value: &ParsedGitUrl) -> Result<Self, Self::Error> { fn try_from(value: &ParsedGitUrl) -> Result<Self, Self::Error> {
Ok(Self::VcsUrl { Ok(Self::VcsUrl {
url: value.url.repository().to_string(), url: value.url.repository().to_string(),
vcs_info: pypi_types::VcsInfo { vcs_info: VcsInfo {
vcs: pypi_types::VcsKind::Git, vcs: VcsKind::Git,
commit_id: value.url.precise().as_ref().map(ToString::to_string), commit_id: value.url.precise().as_ref().map(ToString::to_string),
requested_revision: value.url.reference().as_str().map(ToString::to_string), requested_revision: value.url.reference().as_str().map(ToString::to_string),
}, },

View File

@ -15,6 +15,7 @@ workspace = true
[dependencies] [dependencies]
distribution-types = { workspace = true } distribution-types = { workspace = true }
pep508_rs = { workspace = true } pep508_rs = { workspace = true }
pypi-types = { workspace = true }
uv-client = { workspace = true } uv-client = { workspace = true }
uv-fs = { workspace = true } uv-fs = { workspace = true }
uv-normalize = { workspace = true } uv-normalize = { workspace = true }
@ -25,7 +26,6 @@ fs-err = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
reqwest = { workspace = true, optional = true } reqwest = { workspace = true, optional = true }
reqwest-middleware = { workspace = true, optional = true } reqwest-middleware = { workspace = true, optional = true }
thiserror = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
unscanny = { workspace = true } unscanny = { workspace = true }
url = { workspace = true } url = { workspace = true }

View File

@ -44,13 +44,12 @@ use tracing::instrument;
use unscanny::{Pattern, Scanner}; use unscanny::{Pattern, Scanner};
use url::Url; use url::Url;
use distribution_types::{ use distribution_types::{Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification};
ParsedUrlError, Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification,
};
use pep508_rs::{ use pep508_rs::{
expand_env_vars, split_scheme, strip_host, Extras, MarkerTree, Pep508Error, Pep508ErrorSource, expand_env_vars, split_scheme, strip_host, Extras, MarkerTree, Pep508Error, Pep508ErrorSource,
RequirementOrigin, Scheme, VerbatimUrl, RequirementOrigin, Scheme, VerbatimUrl,
}; };
use pypi_types::VerbatimParsedUrl;
#[cfg(feature = "http")] #[cfg(feature = "http")]
use uv_client::BaseClient; use uv_client::BaseClient;
use uv_client::BaseClientBuilder; use uv_client::BaseClientBuilder;
@ -59,7 +58,7 @@ use uv_fs::{normalize_url_path, Simplified};
use uv_normalize::ExtraName; use uv_normalize::ExtraName;
use uv_warnings::warn_user; use uv_warnings::warn_user;
pub use crate::requirement::{RequirementsTxtRequirement, RequirementsTxtRequirementError}; pub use crate::requirement::RequirementsTxtRequirement;
mod requirement; mod requirement;
@ -203,7 +202,7 @@ impl EditableRequirement {
) -> Result<Self, RequirementsTxtParserError> { ) -> Result<Self, RequirementsTxtParserError> {
// Identify the markers. // Identify the markers.
let (given, marker) = if let Some((requirement, marker)) = Self::split_markers(given) { let (given, marker) = if let Some((requirement, marker)) = Self::split_markers(given) {
let marker = MarkerTree::from_str(marker).map_err(|err| { let marker = MarkerTree::parse_str(marker).map_err(|err| {
// Map from error on the markers to error on the whole requirement. // Map from error on the markers to error on the whole requirement.
let err = Pep508Error { let err = Pep508Error {
message: err.message, message: err.message,
@ -216,14 +215,14 @@ impl EditableRequirement {
RequirementsTxtParserError::Pep508 { RequirementsTxtParserError::Pep508 {
start: err.start, start: err.start,
end: err.start + err.len, end: err.start + err.len,
source: err, source: Box::new(err),
} }
} }
Pep508ErrorSource::UnsupportedRequirement(_) => { Pep508ErrorSource::UnsupportedRequirement(_) => {
RequirementsTxtParserError::UnsupportedRequirement { RequirementsTxtParserError::UnsupportedRequirement {
start: err.start, start: err.start,
end: err.start + err.len, end: err.start + err.len,
source: err, source: Box::new(err),
} }
} }
} }
@ -248,14 +247,14 @@ impl EditableRequirement {
RequirementsTxtParserError::Pep508 { RequirementsTxtParserError::Pep508 {
start: err.start, start: err.start,
end: err.start + err.len, end: err.start + err.len,
source: err, source: Box::new(err),
} }
} }
Pep508ErrorSource::UnsupportedRequirement(_) => { Pep508ErrorSource::UnsupportedRequirement(_) => {
RequirementsTxtParserError::UnsupportedRequirement { RequirementsTxtParserError::UnsupportedRequirement {
start: err.start, start: err.start,
end: err.start + err.len, end: err.start + err.len,
source: err, source: Box::new(err),
} }
} }
} }
@ -403,21 +402,19 @@ pub struct RequirementEntry {
// We place the impl here instead of next to `UnresolvedRequirementSpecification` because // We place the impl here instead of next to `UnresolvedRequirementSpecification` because
// `UnresolvedRequirementSpecification` is defined in `distribution-types` and `requirements-txt` // `UnresolvedRequirementSpecification` is defined in `distribution-types` and `requirements-txt`
// depends on `distribution-types`. // depends on `distribution-types`.
impl TryFrom<RequirementEntry> for UnresolvedRequirementSpecification { impl From<RequirementEntry> for UnresolvedRequirementSpecification {
type Error = Box<ParsedUrlError>; fn from(value: RequirementEntry) -> Self {
Self {
fn try_from(value: RequirementEntry) -> Result<Self, Self::Error> {
Ok(Self {
requirement: match value.requirement { requirement: match value.requirement {
RequirementsTxtRequirement::Named(named) => { RequirementsTxtRequirement::Named(named) => {
UnresolvedRequirement::Named(Requirement::from_pep508(named)?) UnresolvedRequirement::Named(Requirement::from(named))
} }
RequirementsTxtRequirement::Unnamed(unnamed) => { RequirementsTxtRequirement::Unnamed(unnamed) => {
UnresolvedRequirement::Unnamed(unnamed) UnresolvedRequirement::Unnamed(unnamed)
} }
}, },
hashes: value.hashes, hashes: value.hashes,
}) }
} }
} }
@ -427,7 +424,7 @@ pub struct RequirementsTxt {
/// The actual requirements with the hashes. /// The actual requirements with the hashes.
pub requirements: Vec<RequirementEntry>, pub requirements: Vec<RequirementEntry>,
/// Constraints included with `-c`. /// Constraints included with `-c`.
pub constraints: Vec<pep508_rs::Requirement>, pub constraints: Vec<pep508_rs::Requirement<VerbatimParsedUrl>>,
/// Editables with `-e`. /// Editables with `-e`.
pub editables: Vec<EditableRequirement>, pub editables: Vec<EditableRequirement>,
/// The index URL, specified with `--index-url`. /// The index URL, specified with `--index-url`.
@ -914,30 +911,10 @@ fn parse_requirement_and_hashes(
requirement requirement
} }
}) })
.map_err(|err| match err { .map_err(|err| RequirementsTxtParserError::Pep508 {
RequirementsTxtRequirementError::ParsedUrl(err) => { source: err,
RequirementsTxtParserError::ParsedUrl { start,
source: err, end,
start,
end,
}
}
RequirementsTxtRequirementError::Pep508(err) => match err.message {
Pep508ErrorSource::String(_) | Pep508ErrorSource::UrlError(_) => {
RequirementsTxtParserError::Pep508 {
source: err,
start,
end,
}
}
Pep508ErrorSource::UnsupportedRequirement(_) => {
RequirementsTxtParserError::UnsupportedRequirement {
source: err,
start,
end,
}
}
},
})?; })?;
let hashes = if has_hashes { let hashes = if has_hashes {
@ -1068,17 +1045,17 @@ pub enum RequirementsTxtParserError {
column: usize, column: usize,
}, },
UnsupportedRequirement { UnsupportedRequirement {
source: Pep508Error, source: Box<Pep508Error<VerbatimParsedUrl>>,
start: usize, start: usize,
end: usize, end: usize,
}, },
Pep508 { Pep508 {
source: Pep508Error, source: Box<Pep508Error<VerbatimParsedUrl>>,
start: usize, start: usize,
end: usize, end: usize,
}, },
ParsedUrl { ParsedUrl {
source: Box<ParsedUrlError>, source: Box<Pep508Error<VerbatimParsedUrl>>,
start: usize, start: usize,
end: usize, end: usize,
}, },

View File

@ -1,11 +1,9 @@
use std::path::Path; use std::path::Path;
use thiserror::Error;
use distribution_types::ParsedUrlError;
use pep508_rs::{ use pep508_rs::{
Pep508Error, Pep508ErrorSource, RequirementOrigin, TracingReporter, UnnamedRequirement, Pep508Error, Pep508ErrorSource, RequirementOrigin, TracingReporter, UnnamedRequirement,
}; };
use pypi_types::VerbatimParsedUrl;
/// A requirement specifier in a `requirements.txt` file. /// A requirement specifier in a `requirements.txt` file.
/// ///
@ -15,9 +13,9 @@ use pep508_rs::{
pub enum RequirementsTxtRequirement { pub enum RequirementsTxtRequirement {
/// The uv-specific superset over PEP 508 requirements specifier incorporating /// The uv-specific superset over PEP 508 requirements specifier incorporating
/// `tool.uv.sources`. /// `tool.uv.sources`.
Named(pep508_rs::Requirement), Named(pep508_rs::Requirement<VerbatimParsedUrl>),
/// A PEP 508-like, direct URL dependency specifier. /// A PEP 508-like, direct URL dependency specifier.
Unnamed(UnnamedRequirement), Unnamed(UnnamedRequirement<VerbatimParsedUrl>),
} }
impl RequirementsTxtRequirement { impl RequirementsTxtRequirement {
@ -31,20 +29,12 @@ impl RequirementsTxtRequirement {
} }
} }
/// Errors produced when parsing a single `requirements.txt` requirement entry.
#[derive(Debug, Error)]
pub enum RequirementsTxtRequirementError {
/// The requirement's URL parts could not be parsed; carries a boxed `ParsedUrlError`.
#[error(transparent)]
ParsedUrl(#[from] Box<ParsedUrlError>),
/// The input was not a valid PEP 508 requirement specifier.
#[error(transparent)]
Pep508(#[from] Pep508Error),
}
impl RequirementsTxtRequirement { impl RequirementsTxtRequirement {
/// Parse a requirement as seen in a `requirements.txt` file. /// Parse a requirement as seen in a `requirements.txt` file.
pub fn parse( pub fn parse(
input: &str, input: &str,
working_dir: impl AsRef<Path>, working_dir: impl AsRef<Path>,
) -> Result<Self, RequirementsTxtRequirementError> { ) -> Result<Self, Box<Pep508Error<VerbatimParsedUrl>>> {
// Attempt to parse as a PEP 508-compliant requirement. // Attempt to parse as a PEP 508-compliant requirement.
match pep508_rs::Requirement::parse(input, &working_dir) { match pep508_rs::Requirement::parse(input, &working_dir) {
Ok(requirement) => Ok(Self::Named(requirement)), Ok(requirement) => Ok(Self::Named(requirement)),
@ -57,8 +47,9 @@ impl RequirementsTxtRequirement {
&mut TracingReporter, &mut TracingReporter,
)?)) )?))
} }
_ => Err(RequirementsTxtRequirementError::Pep508(err)), _ => Err(err),
}, },
} }
.map_err(Box::new)
} }
} }

View File

@ -35,25 +35,47 @@ RequirementsTxt {
], ],
version_or_url: Some( version_or_url: Some(
Url( Url(
VerbatimUrl { VerbatimParsedUrl {
url: Url { parsed_url: Archive(
scheme: "https", ParsedArchiveUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "https",
password: None, cannot_be_a_base: false,
host: Some( username: "",
Domain( password: None,
"github.com", host: Some(
), Domain(
), "github.com",
port: None, ),
path: "/pandas-dev/pandas", ),
query: None, port: None,
fragment: None, path: "/pandas-dev/pandas",
}, query: None,
given: Some( fragment: None,
"https://github.com/pandas-dev/pandas", },
subdirectory: None,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"github.com",
),
),
port: None,
path: "/pandas-dev/pandas",
query: None,
fragment: None,
},
given: Some(
"https://github.com/pandas-dev/pandas",
),
},
}, },
), ),
), ),

View File

@ -7,21 +7,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"./scripts/packages/black_editable", },
path: "<REQUIREMENTS_DIR>/./scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"./scripts/packages/black_editable",
),
},
}, },
extras: [], extras: [],
marker: None, marker: None,
@ -37,21 +56,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"./scripts/packages/black_editable", },
path: "<REQUIREMENTS_DIR>/./scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"./scripts/packages/black_editable",
),
},
}, },
extras: [ extras: [
ExtraName( ExtraName(
@ -71,21 +109,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"file:///scripts/packages/black_editable", },
path: "/scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"file:///scripts/packages/black_editable",
),
},
}, },
extras: [], extras: [],
marker: None, marker: None,

View File

@ -35,25 +35,47 @@ RequirementsTxt {
], ],
version_or_url: Some( version_or_url: Some(
Url( Url(
VerbatimUrl { VerbatimParsedUrl {
url: Url { parsed_url: Archive(
scheme: "https", ParsedArchiveUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "https",
password: None, cannot_be_a_base: false,
host: Some( username: "",
Domain( password: None,
"github.com", host: Some(
), Domain(
), "github.com",
port: None, ),
path: "/pandas-dev/pandas", ),
query: None, port: None,
fragment: None, path: "/pandas-dev/pandas",
}, query: None,
given: Some( fragment: None,
"https://github.com/pandas-dev/pandas", },
subdirectory: None,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"github.com",
),
),
port: None,
path: "/pandas-dev/pandas",
query: None,
fragment: None,
},
given: Some(
"https://github.com/pandas-dev/pandas",
),
},
}, },
), ),
), ),

View File

@ -7,21 +7,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"./scripts/packages/black_editable", },
path: "<REQUIREMENTS_DIR>/./scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"./scripts/packages/black_editable",
),
},
}, },
extras: [], extras: [],
marker: None, marker: None,
@ -37,21 +56,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"./scripts/packages/black_editable", },
path: "<REQUIREMENTS_DIR>/./scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"./scripts/packages/black_editable",
),
},
}, },
extras: [ extras: [
ExtraName( ExtraName(
@ -71,21 +109,40 @@ RequirementsTxt {
RequirementEntry { RequirementEntry {
requirement: Unnamed( requirement: Unnamed(
UnnamedRequirement { UnnamedRequirement {
url: VerbatimUrl { url: VerbatimParsedUrl {
url: Url { parsed_url: Path(
scheme: "file", ParsedPathUrl {
cannot_be_a_base: false, url: Url {
username: "", scheme: "file",
password: None, cannot_be_a_base: false,
host: None, username: "",
port: None, password: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable", host: None,
query: None, port: None,
fragment: None, path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
}, query: None,
given: Some( fragment: None,
"file:///scripts/packages/black_editable", },
path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
editable: false,
},
), ),
verbatim: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/<REQUIREMENTS_DIR>/scripts/packages/black_editable",
query: None,
fragment: None,
},
given: Some(
"file:///scripts/packages/black_editable",
),
},
}, },
extras: [], extras: [],
marker: None, marker: None,

View File

@ -17,6 +17,7 @@ workspace = true
distribution-types = { workspace = true } distribution-types = { workspace = true }
pep440_rs = { workspace = true } pep440_rs = { workspace = true }
pep508_rs = { workspace = true } pep508_rs = { workspace = true }
pypi-types = { workspace = true }
uv-fs = { workspace = true } uv-fs = { workspace = true }
uv-interpreter = { workspace = true } uv-interpreter = { workspace = true }
uv-types = { workspace = true } uv-types = { workspace = true }

View File

@ -25,9 +25,10 @@ use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore}; use tokio::sync::{Mutex, Semaphore};
use tracing::{debug, info_span, instrument, Instrument}; use tracing::{debug, info_span, instrument, Instrument};
use distribution_types::{ParsedUrlError, Requirement, Resolution}; use distribution_types::{Requirement, Resolution};
use pep440_rs::Version; use pep440_rs::Version;
use pep508_rs::PackageName; use pep508_rs::PackageName;
use pypi_types::VerbatimParsedUrl;
use uv_configuration::{BuildKind, ConfigSettings, SetupPyStrategy}; use uv_configuration::{BuildKind, ConfigSettings, SetupPyStrategy};
use uv_fs::{PythonExt, Simplified}; use uv_fs::{PythonExt, Simplified};
use uv_interpreter::{Interpreter, PythonEnvironment}; use uv_interpreter::{Interpreter, PythonEnvironment};
@ -66,18 +67,16 @@ static WHEEL_NOT_FOUND_RE: Lazy<Regex> =
static DEFAULT_BACKEND: Lazy<Pep517Backend> = Lazy::new(|| Pep517Backend { static DEFAULT_BACKEND: Lazy<Pep517Backend> = Lazy::new(|| Pep517Backend {
backend: "setuptools.build_meta:__legacy__".to_string(), backend: "setuptools.build_meta:__legacy__".to_string(),
backend_path: None, backend_path: None,
requirements: vec![Requirement::from_pep508( requirements: vec![Requirement::from(
pep508_rs::Requirement::from_str("setuptools >= 40.8.0").unwrap(), pep508_rs::Requirement::from_str("setuptools >= 40.8.0").unwrap(),
) )],
.unwrap()],
}); });
/// The requirements for `--legacy-setup-py` builds. /// The requirements for `--legacy-setup-py` builds.
static SETUP_PY_REQUIREMENTS: Lazy<[Requirement; 2]> = Lazy::new(|| { static SETUP_PY_REQUIREMENTS: Lazy<[Requirement; 2]> = Lazy::new(|| {
[ [
Requirement::from_pep508(pep508_rs::Requirement::from_str("setuptools >= 40.8.0").unwrap()) Requirement::from(pep508_rs::Requirement::from_str("setuptools >= 40.8.0").unwrap()),
.unwrap(), Requirement::from(pep508_rs::Requirement::from_str("wheel").unwrap()),
Requirement::from_pep508(pep508_rs::Requirement::from_str("wheel").unwrap()).unwrap(),
] ]
}); });
@ -116,8 +115,6 @@ pub enum Error {
}, },
#[error("Failed to build PATH for build script")] #[error("Failed to build PATH for build script")]
BuildScriptPath(#[source] env::JoinPathsError), BuildScriptPath(#[source] env::JoinPathsError),
#[error("Failed to parse requirements from build backend")]
DirectUrl(#[source] Box<ParsedUrlError>),
} }
#[derive(Debug)] #[derive(Debug)]
@ -244,7 +241,7 @@ pub struct Project {
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
pub struct BuildSystem { pub struct BuildSystem {
/// PEP 508 dependencies required to execute the build system. /// PEP 508 dependencies required to execute the build system.
pub requires: Vec<pep508_rs::Requirement>, pub requires: Vec<pep508_rs::Requirement<VerbatimParsedUrl>>,
/// A string naming a Python object that will be used to perform the build. /// A string naming a Python object that will be used to perform the build.
pub build_backend: Option<String>, pub build_backend: Option<String>,
/// Specify that their backend code is hosted in-tree, this key contains a list of directories. /// Specify that their backend code is hosted in-tree, this key contains a list of directories.
@ -601,9 +598,8 @@ impl SourceBuild {
requirements: build_system requirements: build_system
.requires .requires
.into_iter() .into_iter()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>() .collect(),
.map_err(|err| Box::new(Error::DirectUrl(err)))?,
} }
} else { } else {
// If a `pyproject.toml` is present, but `[build-system]` is missing, proceed with // If a `pyproject.toml` is present, but `[build-system]` is missing, proceed with
@ -982,7 +978,7 @@ async fn create_pep517_build_environment(
})?; })?;
// Deserialize the requirements from the output file. // Deserialize the requirements from the output file.
let extra_requires: Vec<pep508_rs::Requirement> = serde_json::from_slice::<Vec<pep508_rs::Requirement>>(&contents).map_err(|err| { let extra_requires: Vec<pep508_rs::Requirement<VerbatimParsedUrl>> = serde_json::from_slice::<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>>(&contents).map_err(|err| {
Error::from_command_output( Error::from_command_output(
format!( format!(
"Build backend failed to return extra requires with `get_requires_for_build_{build_kind}`: {err}" "Build backend failed to return extra requires with `get_requires_for_build_{build_kind}`: {err}"
@ -991,11 +987,7 @@ async fn create_pep517_build_environment(
version_id, version_id,
) )
})?; })?;
let extra_requires: Vec<_> = extra_requires let extra_requires: Vec<_> = extra_requires.into_iter().map(Requirement::from).collect();
.into_iter()
.map(Requirement::from_pep508)
.collect::<Result<_, _>>()
.map_err(Error::DirectUrl)?;
// Some packages (such as tqdm 4.66.1) list only extra requires that have already been part of // Some packages (such as tqdm 4.66.1) list only extra requires that have already been part of
// the pyproject.toml requires (in this case, `wheel`). We can skip doing the whole resolution // the pyproject.toml requires (in this case, `wheel`). We can skip doing the whole resolution

View File

@ -20,6 +20,7 @@ distribution-filename = { workspace = true }
distribution-types = { workspace = true } distribution-types = { workspace = true }
install-wheel-rs = { workspace = true } install-wheel-rs = { workspace = true }
pep508_rs = { workspace = true } pep508_rs = { workspace = true }
pypi-types = { workspace = true }
uv-build = { workspace = true } uv-build = { workspace = true }
uv-cache = { workspace = true, features = ["clap"] } uv-cache = { workspace = true, features = ["clap"] }
uv-client = { workspace = true } uv-client = { workspace = true }

View File

@ -5,8 +5,9 @@ use anyhow::{bail, Result};
use clap::Parser; use clap::Parser;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use distribution_types::{BuiltDist, DirectUrlBuiltDist, ParsedUrl, RemoteSource}; use distribution_types::{BuiltDist, DirectUrlBuiltDist, RemoteSource};
use pep508_rs::VerbatimUrl; use pep508_rs::VerbatimUrl;
use pypi_types::ParsedUrl;
use uv_cache::{Cache, CacheArgs}; use uv_cache::{Cache, CacheArgs};
use uv_client::RegistryClientBuilder; use uv_client::RegistryClientBuilder;

View File

@ -4,7 +4,6 @@ use tokio::task::JoinError;
use zip::result::ZipError; use zip::result::ZipError;
use distribution_filename::WheelFilenameError; use distribution_filename::WheelFilenameError;
use distribution_types::ParsedUrlError;
use pep440_rs::Version; use pep440_rs::Version;
use pypi_types::HashDigest; use pypi_types::HashDigest;
use uv_client::BetterReqwestError; use uv_client::BetterReqwestError;
@ -28,8 +27,6 @@ pub enum Error {
#[error("Git operation failed")] #[error("Git operation failed")]
Git(#[source] anyhow::Error), Git(#[source] anyhow::Error),
#[error(transparent)] #[error(transparent)]
DirectUrl(#[from] Box<ParsedUrlError>),
#[error(transparent)]
Reqwest(#[from] BetterReqwestError), Reqwest(#[from] BetterReqwestError),
#[error(transparent)] #[error(transparent)]
Client(#[from] uv_client::Error), Client(#[from] uv_client::Error),

View File

@ -8,7 +8,7 @@ use tracing::debug;
use url::Url; use url::Url;
use cache_key::{CanonicalUrl, RepositoryUrl}; use cache_key::{CanonicalUrl, RepositoryUrl};
use distribution_types::ParsedGitUrl; use pypi_types::ParsedGitUrl;
use uv_cache::{Cache, CacheBucket}; use uv_cache::{Cache, CacheBucket};
use uv_fs::LockedFile; use uv_fs::LockedFile;
use uv_git::{Fetch, GitReference, GitSha, GitSource, GitUrl}; use uv_git::{Fetch, GitReference, GitSha, GitSource, GitUrl};

View File

@ -17,12 +17,11 @@ use zip::ZipArchive;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use distribution_types::{ use distribution_types::{
BuildableSource, DirectorySourceDist, DirectorySourceUrl, Dist, FileLocation, GitSourceUrl, BuildableSource, DirectorySourceDist, DirectorySourceUrl, Dist, FileLocation, GitSourceUrl,
HashPolicy, Hashed, LocalEditable, ParsedArchiveUrl, PathSourceUrl, RemoteSource, SourceDist, HashPolicy, Hashed, LocalEditable, PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
SourceUrl,
}; };
use install_wheel_rs::metadata::read_archive_metadata; use install_wheel_rs::metadata::read_archive_metadata;
use platform_tags::Tags; use platform_tags::Tags;
use pypi_types::{HashDigest, Metadata23}; use pypi_types::{HashDigest, Metadata23, ParsedArchiveUrl};
use uv_cache::{ use uv_cache::{
ArchiveTimestamp, CacheBucket, CacheEntry, CacheShard, CachedByTimestamp, Freshness, Timestamp, ArchiveTimestamp, CacheBucket, CacheEntry, CacheShard, CachedByTimestamp, Freshness, Timestamp,
WheelCache, WheelCache,
@ -1026,7 +1025,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Resolve to a precise Git SHA. // Resolve to a precise Git SHA.
let url = if let Some(url) = resolve_precise( let url = if let Some(url) = resolve_precise(
&resource.git, resource.git,
self.build_context.cache(), self.build_context.cache(),
self.reporter.as_ref(), self.reporter.as_ref(),
) )
@ -1034,11 +1033,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
{ {
Cow::Owned(url) Cow::Owned(url)
} else { } else {
Cow::Borrowed(resource.git.as_ref()) Cow::Borrowed(resource.git)
}; };
let subdirectory = resource.subdirectory.as_deref();
// Fetch the Git repository. // Fetch the Git repository.
let fetch = let fetch =
fetch_git_archive(&url, self.build_context.cache(), self.reporter.as_ref()).await?; fetch_git_archive(&url, self.build_context.cache(), self.reporter.as_ref()).await?;
@ -1062,7 +1059,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source)); .map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self let (disk_filename, filename, metadata) = self
.build_distribution(source, fetch.path(), subdirectory, &cache_shard) .build_distribution(source, fetch.path(), resource.subdirectory, &cache_shard)
.await?; .await?;
if let Some(task) = task { if let Some(task) = task {
@ -1102,7 +1099,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Resolve to a precise Git SHA. // Resolve to a precise Git SHA.
let url = if let Some(url) = resolve_precise( let url = if let Some(url) = resolve_precise(
&resource.git, resource.git,
self.build_context.cache(), self.build_context.cache(),
self.reporter.as_ref(), self.reporter.as_ref(),
) )
@ -1110,11 +1107,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
{ {
Cow::Owned(url) Cow::Owned(url)
} else { } else {
Cow::Borrowed(resource.git.as_ref()) Cow::Borrowed(resource.git)
}; };
let subdirectory = resource.subdirectory.as_deref();
// Fetch the Git repository. // Fetch the Git repository.
let fetch = let fetch =
fetch_git_archive(&url, self.build_context.cache(), self.reporter.as_ref()).await?; fetch_git_archive(&url, self.build_context.cache(), self.reporter.as_ref()).await?;
@ -1143,7 +1138,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// If the backend supports `prepare_metadata_for_build_wheel`, use it. // If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self if let Some(metadata) = self
.build_metadata(source, fetch.path(), subdirectory) .build_metadata(source, fetch.path(), resource.subdirectory)
.boxed_local() .boxed_local()
.await? .await?
{ {
@ -1165,7 +1160,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source)); .map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self let (_disk_filename, _filename, metadata) = self
.build_distribution(source, fetch.path(), subdirectory, &cache_shard) .build_distribution(source, fetch.path(), resource.subdirectory, &cache_shard)
.await?; .await?;
if let Some(task) = task { if let Some(task) = task {

View File

@ -12,6 +12,7 @@ use distribution_types::{
UnresolvedRequirementSpecification, UnresolvedRequirementSpecification,
}; };
use pep440_rs::{Version, VersionSpecifiers}; use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::VerbatimParsedUrl;
use requirements_txt::EditableRequirement; use requirements_txt::EditableRequirement;
use uv_cache::{ArchiveTarget, ArchiveTimestamp}; use uv_cache::{ArchiveTarget, ArchiveTimestamp};
use uv_interpreter::PythonEnvironment; use uv_interpreter::PythonEnvironment;
@ -341,9 +342,9 @@ impl SitePackages {
&requirement.extras, &requirement.extras,
) { ) {
let dependency = UnresolvedRequirementSpecification { let dependency = UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named( requirement: UnresolvedRequirement::Named(Requirement::from(
Requirement::from_pep508(dependency)?, dependency,
), )),
hashes: vec![], hashes: vec![],
}; };
if seen.insert(dependency.clone()) { if seen.insert(dependency.clone()) {
@ -363,7 +364,9 @@ impl SitePackages {
while let Some(entry) = stack.pop() { while let Some(entry) = stack.pop() {
let installed = match &entry.requirement { let installed = match &entry.requirement {
UnresolvedRequirement::Named(requirement) => self.get_packages(&requirement.name), UnresolvedRequirement::Named(requirement) => self.get_packages(&requirement.name),
UnresolvedRequirement::Unnamed(requirement) => self.get_urls(requirement.url.raw()), UnresolvedRequirement::Unnamed(requirement) => {
self.get_urls(requirement.url.verbatim.raw())
}
}; };
match installed.as_slice() { match installed.as_slice() {
[] => { [] => {
@ -373,7 +376,7 @@ impl SitePackages {
[distribution] => { [distribution] => {
match RequirementSatisfaction::check( match RequirementSatisfaction::check(
distribution, distribution,
entry.requirement.source()?.as_ref(), entry.requirement.source().as_ref(),
)? { )? {
RequirementSatisfaction::Mismatch | RequirementSatisfaction::OutOfDate => { RequirementSatisfaction::Mismatch | RequirementSatisfaction::OutOfDate => {
return Ok(SatisfiesResult::Unsatisfied(entry.requirement.to_string())) return Ok(SatisfiesResult::Unsatisfied(entry.requirement.to_string()))
@ -405,9 +408,9 @@ impl SitePackages {
entry.requirement.extras(), entry.requirement.extras(),
) { ) {
let dependency = UnresolvedRequirementSpecification { let dependency = UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named( requirement: UnresolvedRequirement::Named(Requirement::from(
Requirement::from_pep508(dependency)?, dependency,
), )),
hashes: vec![], hashes: vec![],
}; };
if seen.insert(dependency.clone()) { if seen.insert(dependency.clone()) {
@ -471,7 +474,7 @@ pub enum SitePackagesDiagnostic {
/// The package that is missing a dependency. /// The package that is missing a dependency.
package: PackageName, package: PackageName,
/// The dependency that is missing. /// The dependency that is missing.
requirement: pep508_rs::Requirement, requirement: pep508_rs::Requirement<VerbatimParsedUrl>,
}, },
IncompatibleDependency { IncompatibleDependency {
/// The package that has an incompatible dependency. /// The package that has an incompatible dependency.
@ -479,7 +482,7 @@ pub enum SitePackagesDiagnostic {
/// The version of the package that is installed. /// The version of the package that is installed.
version: Version, version: Version,
/// The dependency that is incompatible. /// The dependency that is incompatible.
requirement: pep508_rs::Requirement, requirement: pep508_rs::Requirement<VerbatimParsedUrl>,
}, },
DuplicatePackage { DuplicatePackage {
/// The package that has multiple installed distributions. /// The package that has multiple installed distributions.

View File

@ -24,8 +24,6 @@ pub enum LookaheadError {
DownloadAndBuild(SourceDist, #[source] uv_distribution::Error), DownloadAndBuild(SourceDist, #[source] uv_distribution::Error),
#[error(transparent)] #[error(transparent)]
UnsupportedUrl(#[from] distribution_types::Error), UnsupportedUrl(#[from] distribution_types::Error),
#[error(transparent)]
InvalidRequirement(#[from] Box<distribution_types::ParsedUrlError>),
} }
/// A resolver for resolving lookahead requirements from direct URLs. /// A resolver for resolving lookahead requirements from direct URLs.
@ -211,8 +209,8 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()? .collect()
} else { } else {
// Run the PEP 517 build process to extract metadata from the source distribution. // Run the PEP 517 build process to extract metadata from the source distribution.
let archive = self let archive = self
@ -233,10 +231,7 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
.distributions() .distributions()
.done(id, Arc::new(MetadataResponse::Found(archive))); .done(id, Arc::new(MetadataResponse::Found(archive)));
requires_dist requires_dist.into_iter().map(Requirement::from).collect()
.into_iter()
.map(Requirement::from_pep508)
.collect::<Result<_, _>>()?
} }
}; };

View File

@ -20,9 +20,10 @@ use serde::{Deserialize, Serialize};
use thiserror::Error; use thiserror::Error;
use url::Url; use url::Url;
use distribution_types::{ParsedUrlError, Requirement, RequirementSource, Requirements}; use distribution_types::{Requirement, RequirementSource, Requirements};
use pep440_rs::VersionSpecifiers; use pep440_rs::VersionSpecifiers;
use pep508_rs::{RequirementOrigin, VerbatimUrl, VersionOrUrl}; use pep508_rs::{Pep508Error, RequirementOrigin, VerbatimUrl, VersionOrUrl};
use pypi_types::VerbatimParsedUrl;
use uv_configuration::PreviewMode; use uv_configuration::PreviewMode;
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_git::GitReference; use uv_git::GitReference;
@ -34,7 +35,7 @@ use crate::ExtrasSpecification;
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum Pep621Error { pub enum Pep621Error {
#[error(transparent)] #[error(transparent)]
Pep508(#[from] pep508_rs::Pep508Error), Pep508(#[from] Box<Pep508Error<VerbatimParsedUrl>>),
#[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")] #[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")]
MissingProjectSection, MissingProjectSection,
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")] #[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
@ -43,12 +44,16 @@ pub enum Pep621Error {
LoweringError(PackageName, #[source] LoweringError), LoweringError(PackageName, #[source] LoweringError),
} }
impl From<Pep508Error<VerbatimParsedUrl>> for Pep621Error {
fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self {
Self::Pep508(Box::new(error))
}
}
/// An error parsing and merging `tool.uv.sources` with /// An error parsing and merging `tool.uv.sources` with
/// `project.{dependencies,optional-dependencies}`. /// `project.{dependencies,optional-dependencies}`.
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum LoweringError { pub enum LoweringError {
#[error("Invalid URL structure")]
DirectUrl(#[from] Box<ParsedUrlError>),
#[error("Unsupported path (can't convert to URL): `{}`", _0.user_display())] #[error("Unsupported path (can't convert to URL): `{}`", _0.user_display())]
PathToUrl(PathBuf), PathToUrl(PathBuf),
#[error("Package is not included as workspace package in `tool.uv.workspace`")] #[error("Package is not included as workspace package in `tool.uv.workspace`")]
@ -385,7 +390,7 @@ pub(crate) fn lower_requirements(
/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`. /// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`.
pub(crate) fn lower_requirement( pub(crate) fn lower_requirement(
requirement: pep508_rs::Requirement, requirement: pep508_rs::Requirement<VerbatimParsedUrl>,
project_name: &PackageName, project_name: &PackageName,
project_dir: &Path, project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>, project_sources: &BTreeMap<PackageName, Source>,
@ -420,7 +425,7 @@ pub(crate) fn lower_requirement(
requirement.name requirement.name
); );
} }
return Ok(Requirement::from_pep508(requirement)?); return Ok(Requirement::from(requirement));
}; };
if preview.is_disabled() { if preview.is_disabled() {

View File

@ -11,6 +11,7 @@ use distribution_types::{
BuildableSource, DirectorySourceUrl, HashPolicy, Requirement, SourceUrl, VersionId, BuildableSource, DirectorySourceUrl, HashPolicy, Requirement, SourceUrl, VersionId,
}; };
use pep508_rs::RequirementOrigin; use pep508_rs::RequirementOrigin;
use pypi_types::VerbatimParsedUrl;
use uv_distribution::{DistributionDatabase, Reporter}; use uv_distribution::{DistributionDatabase, Reporter};
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_resolver::{InMemoryIndex, MetadataResponse};
@ -74,12 +75,15 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
Ok(requirements Ok(requirements
.into_iter() .into_iter()
.flatten() .flatten()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?) .collect())
} }
/// Infer the package name for a given "unnamed" requirement. /// Infer the package name for a given "unnamed" requirement.
async fn resolve_source_tree(&self, path: &Path) -> Result<Vec<pep508_rs::Requirement>> { async fn resolve_source_tree(
&self,
path: &Path,
) -> Result<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>> {
// Convert to a buildable source. // Convert to a buildable source.
let source_tree = fs_err::canonicalize(path).with_context(|| { let source_tree = fs_err::canonicalize(path).with_context(|| {
format!( format!(

View File

@ -11,7 +11,8 @@ use distribution_types::{
FlatIndexLocation, IndexUrl, Requirement, RequirementSource, UnresolvedRequirement, FlatIndexLocation, IndexUrl, Requirement, RequirementSource, UnresolvedRequirement,
UnresolvedRequirementSpecification, UnresolvedRequirementSpecification,
}; };
use pep508_rs::{UnnamedRequirement, VerbatimUrl}; use pep508_rs::{UnnamedRequirement, UnnamedRequirementUrl};
use pypi_types::VerbatimParsedUrl;
use requirements_txt::{ use requirements_txt::{
EditableRequirement, FindLink, RequirementEntry, RequirementsTxt, RequirementsTxtRequirement, EditableRequirement, FindLink, RequirementEntry, RequirementsTxt, RequirementsTxtRequirement,
}; };
@ -67,12 +68,12 @@ impl RequirementsSpecification {
let requirement = RequirementsTxtRequirement::parse(name, std::env::current_dir()?) let requirement = RequirementsTxtRequirement::parse(name, std::env::current_dir()?)
.with_context(|| format!("Failed to parse: `{name}`"))?; .with_context(|| format!("Failed to parse: `{name}`"))?;
Self { Self {
requirements: vec![UnresolvedRequirementSpecification::try_from( requirements: vec![UnresolvedRequirementSpecification::from(
RequirementEntry { RequirementEntry {
requirement, requirement,
hashes: vec![], hashes: vec![],
}, },
)?], )],
..Self::default() ..Self::default()
} }
} }
@ -96,8 +97,8 @@ impl RequirementsSpecification {
constraints: requirements_txt constraints: requirements_txt
.constraints .constraints
.into_iter() .into_iter()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?, .collect(),
editables: requirements_txt.editables, editables: requirements_txt.editables,
index_url: requirements_txt.index_url.map(IndexUrl::from), index_url: requirements_txt.index_url.map(IndexUrl::from),
extra_index_urls: requirements_txt extra_index_urls: requirements_txt
@ -132,7 +133,7 @@ impl RequirementsSpecification {
project: None, project: None,
requirements: vec![UnresolvedRequirementSpecification { requirements: vec![UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement { requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement {
url: VerbatimUrl::from_path(path)?, url: VerbatimParsedUrl::parse_absolute_path(path)?,
extras: vec![], extras: vec![],
marker: None, marker: None,
origin: None, origin: None,

View File

@ -11,12 +11,12 @@ use tracing::debug;
use distribution_filename::{SourceDistFilename, WheelFilename}; use distribution_filename::{SourceDistFilename, WheelFilename};
use distribution_types::{ use distribution_types::{
BuildableSource, DirectSourceUrl, DirectorySourceUrl, GitSourceUrl, ParsedGitUrl, BuildableSource, DirectSourceUrl, DirectorySourceUrl, GitSourceUrl, PathSourceUrl,
PathSourceUrl, RemoteSource, Requirement, SourceUrl, UnresolvedRequirement, RemoteSource, Requirement, SourceUrl, UnresolvedRequirement,
UnresolvedRequirementSpecification, VersionId, UnresolvedRequirementSpecification, VersionId,
}; };
use pep508_rs::{Scheme, UnnamedRequirement, VersionOrUrl}; use pep508_rs::{UnnamedRequirement, VersionOrUrl};
use pypi_types::Metadata10; use pypi_types::{Metadata10, ParsedUrl, VerbatimParsedUrl};
use uv_distribution::{DistributionDatabase, Reporter}; use uv_distribution::{DistributionDatabase, Reporter};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_resolver::{InMemoryIndex, MetadataResponse};
@ -72,9 +72,9 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
.map(|entry| async { .map(|entry| async {
match entry.requirement { match entry.requirement {
UnresolvedRequirement::Named(requirement) => Ok(requirement), UnresolvedRequirement::Named(requirement) => Ok(requirement),
UnresolvedRequirement::Unnamed(requirement) => Ok(Requirement::from_pep508( UnresolvedRequirement::Unnamed(requirement) => Ok(Requirement::from(
Self::resolve_requirement(requirement, hasher, index, &database).await?, Self::resolve_requirement(requirement, hasher, index, &database).await?,
)?), )),
} }
}) })
.collect::<FuturesOrdered<_>>() .collect::<FuturesOrdered<_>>()
@ -84,19 +84,19 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
/// Infer the package name for a given "unnamed" requirement. /// Infer the package name for a given "unnamed" requirement.
async fn resolve_requirement( async fn resolve_requirement(
requirement: UnnamedRequirement, requirement: UnnamedRequirement<VerbatimParsedUrl>,
hasher: &HashStrategy, hasher: &HashStrategy,
index: &InMemoryIndex, index: &InMemoryIndex,
database: &DistributionDatabase<'a, Context>, database: &DistributionDatabase<'a, Context>,
) -> Result<pep508_rs::Requirement> { ) -> Result<pep508_rs::Requirement<VerbatimParsedUrl>> {
// If the requirement is a wheel, extract the package name from the wheel filename. // If the requirement is a wheel, extract the package name from the wheel filename.
// //
// Ex) `anyio-4.3.0-py3-none-any.whl` // Ex) `anyio-4.3.0-py3-none-any.whl`
if Path::new(requirement.url.path()) if Path::new(requirement.url.verbatim.path())
.extension() .extension()
.is_some_and(|ext| ext.eq_ignore_ascii_case("whl")) .is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
{ {
let filename = WheelFilename::from_str(&requirement.url.filename()?)?; let filename = WheelFilename::from_str(&requirement.url.verbatim.filename()?)?;
return Ok(pep508_rs::Requirement { return Ok(pep508_rs::Requirement {
name: filename.name, name: filename.name,
extras: requirement.extras, extras: requirement.extras,
@ -112,6 +112,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
// Ex) `anyio-4.3.0.tar.gz` // Ex) `anyio-4.3.0.tar.gz`
if let Some(filename) = requirement if let Some(filename) = requirement
.url .url
.verbatim
.filename() .filename()
.ok() .ok()
.and_then(|filename| SourceDistFilename::parsed_normalized_filename(&filename).ok()) .and_then(|filename| SourceDistFilename::parsed_normalized_filename(&filename).ok())
@ -125,27 +126,43 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
}); });
} }
let source = match Scheme::parse(requirement.url.scheme()) { let source = match &requirement.url.parsed_url {
Some(Scheme::File) => { // If the path points to a directory, attempt to read the name from static metadata.
let path = requirement ParsedUrl::Path(parsed_path_url) if parsed_path_url.path.is_dir() => {
.url // Attempt to read a `PKG-INFO` from the directory.
.to_file_path() if let Some(metadata) = fs_err::read(parsed_path_url.path.join("PKG-INFO"))
.expect("URL to be a file path"); .ok()
.and_then(|contents| Metadata10::parse_pkg_info(&contents).ok())
{
debug!(
"Found PKG-INFO metadata for {path} ({name})",
path = parsed_path_url.path.display(),
name = metadata.name
);
return Ok(pep508_rs::Requirement {
name: metadata.name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
origin: requirement.origin,
});
}
// If the path points to a directory, attempt to read the name from static metadata. // Attempt to read a `pyproject.toml` file.
if path.is_dir() { let project_path = parsed_path_url.path.join("pyproject.toml");
// Attempt to read a `PKG-INFO` from the directory. if let Some(pyproject) = fs_err::read_to_string(project_path)
if let Some(metadata) = fs_err::read(path.join("PKG-INFO")) .ok()
.ok() .and_then(|contents| toml::from_str::<PyProjectToml>(&contents).ok())
.and_then(|contents| Metadata10::parse_pkg_info(&contents).ok()) {
{ // Read PEP 621 metadata from the `pyproject.toml`.
if let Some(project) = pyproject.project {
debug!( debug!(
"Found PKG-INFO metadata for {path} ({name})", "Found PEP 621 metadata for {path} in `pyproject.toml` ({name})",
path = path.display(), path = parsed_path_url.path.display(),
name = metadata.name name = project.name
); );
return Ok(pep508_rs::Requirement { return Ok(pep508_rs::Requirement {
name: metadata.name, name: project.name,
extras: requirement.extras, extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)), version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker, marker: requirement.marker,
@ -153,106 +170,75 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
}); });
} }
// Attempt to read a `pyproject.toml` file. // Read Poetry-specific metadata from the `pyproject.toml`.
let project_path = path.join("pyproject.toml"); if let Some(tool) = pyproject.tool {
if let Some(pyproject) = fs_err::read_to_string(project_path) if let Some(poetry) = tool.poetry {
.ok() if let Some(name) = poetry.name {
.and_then(|contents| toml::from_str::<PyProjectToml>(&contents).ok()) debug!(
{ "Found Poetry metadata for {path} in `pyproject.toml` ({name})",
// Read PEP 621 metadata from the `pyproject.toml`. path = parsed_path_url.path.display(),
if let Some(project) = pyproject.project { name = name
debug!( );
"Found PEP 621 metadata for {path} in `pyproject.toml` ({name})", return Ok(pep508_rs::Requirement {
path = path.display(), name,
name = project.name extras: requirement.extras,
); version_or_url: Some(VersionOrUrl::Url(requirement.url)),
return Ok(pep508_rs::Requirement { marker: requirement.marker,
name: project.name, origin: requirement.origin,
extras: requirement.extras, });
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
origin: requirement.origin,
});
}
// Read Poetry-specific metadata from the `pyproject.toml`.
if let Some(tool) = pyproject.tool {
if let Some(poetry) = tool.poetry {
if let Some(name) = poetry.name {
debug!(
"Found Poetry metadata for {path} in `pyproject.toml` ({name})",
path = path.display(),
name = name
);
return Ok(pep508_rs::Requirement {
name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
origin: requirement.origin,
});
}
} }
} }
} }
}
// Attempt to read a `setup.cfg` from the directory. // Attempt to read a `setup.cfg` from the directory.
if let Some(setup_cfg) = fs_err::read_to_string(path.join("setup.cfg")) if let Some(setup_cfg) =
fs_err::read_to_string(parsed_path_url.path.join("setup.cfg"))
.ok() .ok()
.and_then(|contents| { .and_then(|contents| {
let mut ini = Ini::new_cs(); let mut ini = Ini::new_cs();
ini.set_multiline(true); ini.set_multiline(true);
ini.read(contents).ok() ini.read(contents).ok()
}) })
{ {
if let Some(section) = setup_cfg.get("metadata") { if let Some(section) = setup_cfg.get("metadata") {
if let Some(Some(name)) = section.get("name") { if let Some(Some(name)) = section.get("name") {
if let Ok(name) = PackageName::from_str(name) { if let Ok(name) = PackageName::from_str(name) {
debug!( debug!(
"Found setuptools metadata for {path} in `setup.cfg` ({name})", "Found setuptools metadata for {path} in `setup.cfg` ({name})",
path = path.display(), path = parsed_path_url.path.display(),
name = name name = name
); );
return Ok(pep508_rs::Requirement { return Ok(pep508_rs::Requirement {
name, name,
extras: requirement.extras, extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)), version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker, marker: requirement.marker,
origin: requirement.origin, origin: requirement.origin,
}); });
}
} }
} }
} }
SourceUrl::Directory(DirectorySourceUrl {
url: &requirement.url,
path: Cow::Owned(path),
})
} else {
SourceUrl::Path(PathSourceUrl {
url: &requirement.url,
path: Cow::Owned(path),
})
} }
}
Some(Scheme::Http | Scheme::Https) => SourceUrl::Direct(DirectSourceUrl { SourceUrl::Directory(DirectorySourceUrl {
url: &requirement.url, url: &requirement.url.verbatim,
}), path: Cow::Borrowed(&parsed_path_url.path),
Some(Scheme::GitSsh | Scheme::GitHttps | Scheme::GitHttp) => {
let git = ParsedGitUrl::try_from(requirement.url.to_url())?;
SourceUrl::Git(GitSourceUrl {
git: Cow::Owned(git.url),
subdirectory: git.subdirectory.map(Cow::Owned),
url: &requirement.url,
}) })
} }
_ => { // If it's not a directory, assume it's a file.
return Err(anyhow::anyhow!( ParsedUrl::Path(parsed_path_url) => SourceUrl::Path(PathSourceUrl {
"Unsupported scheme for unnamed requirement: {}", url: &requirement.url.verbatim,
requirement.url path: Cow::Borrowed(&parsed_path_url.path),
)); }),
} ParsedUrl::Archive(parsed_archive_url) => SourceUrl::Direct(DirectSourceUrl {
url: &parsed_archive_url.url,
}),
ParsedUrl::Git(parsed_git_url) => SourceUrl::Git(GitSourceUrl {
url: &requirement.url.verbatim,
git: &parsed_git_url.url,
subdirectory: parsed_git_url.subdirectory.as_deref(),
}),
}; };
// Fetch the metadata for the distribution. // Fetch the metadata for the distribution.

View File

@ -9,7 +9,7 @@ use pubgrub::report::{DefaultStringReporter, DerivationTree, External, Reporter}
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use dashmap::DashMap; use dashmap::DashMap;
use distribution_types::{BuiltDist, IndexLocations, InstalledDist, ParsedUrlError, SourceDist}; use distribution_types::{BuiltDist, IndexLocations, InstalledDist, SourceDist};
use pep440_rs::Version; use pep440_rs::Version;
use pep508_rs::Requirement; use pep508_rs::Requirement;
use uv_normalize::PackageName; use uv_normalize::PackageName;
@ -96,10 +96,6 @@ pub enum ResolveError {
#[error("In `--require-hashes` mode, all requirements must be pinned upfront with `==`, but found: `{0}`")] #[error("In `--require-hashes` mode, all requirements must be pinned upfront with `==`, but found: `{0}`")]
UnhashedPackage(PackageName), UnhashedPackage(PackageName),
// TODO(konsti): Attach the distribution that contained the invalid requirement as error source.
#[error("Failed to parse requirements")]
DirectUrl(#[from] Box<ParsedUrlError>),
/// Something unexpected happened. /// Something unexpected happened.
#[error("{0}")] #[error("{0}")]
Failure(String), Failure(String),

View File

@ -12,14 +12,13 @@ use url::Url;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use distribution_types::{ use distribution_types::{
BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, FileLocation, BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, FileLocation,
GitSourceDist, IndexUrl, ParsedArchiveUrl, ParsedGitUrl, PathBuiltDist, PathSourceDist, GitSourceDist, IndexUrl, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, RemoteSource, Resolution, RegistrySourceDist, RemoteSource, Resolution, ResolvedDist, ToUrlError,
ResolvedDist, ToUrlError,
}; };
use pep440_rs::Version; use pep440_rs::Version;
use pep508_rs::{MarkerEnvironment, VerbatimUrl}; use pep508_rs::{MarkerEnvironment, VerbatimUrl};
use platform_tags::{TagCompatibility, TagPriority, Tags}; use platform_tags::{TagCompatibility, TagPriority, Tags};
use pypi_types::HashDigest; use pypi_types::{HashDigest, ParsedArchiveUrl, ParsedGitUrl};
use uv_git::{GitReference, GitSha}; use uv_git::{GitReference, GitSha};
use uv_normalize::PackageName; use uv_normalize::PackageName;

View File

@ -4,21 +4,19 @@ use std::sync::Arc;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tracing::trace; use tracing::trace;
use distribution_types::{ParsedUrlError, Requirement, RequirementSource}; use distribution_types::{Requirement, RequirementSource};
use pep440_rs::{Operator, Version}; use pep440_rs::{Operator, Version};
use pep508_rs::{MarkerEnvironment, UnnamedRequirement}; use pep508_rs::{MarkerEnvironment, UnnamedRequirement};
use pypi_types::{HashDigest, HashError}; use pypi_types::{HashDigest, HashError, VerbatimParsedUrl};
use requirements_txt::{RequirementEntry, RequirementsTxtRequirement}; use requirements_txt::{RequirementEntry, RequirementsTxtRequirement};
use uv_normalize::PackageName; use uv_normalize::PackageName;
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum PreferenceError { pub enum PreferenceError {
#[error("direct URL requirements without package names are not supported: `{0}`")] #[error("direct URL requirements without package names are not supported: `{0}`")]
Bare(UnnamedRequirement), Bare(UnnamedRequirement<VerbatimParsedUrl>),
#[error(transparent)] #[error(transparent)]
Hash(#[from] HashError), Hash(#[from] HashError),
#[error(transparent)]
ParsedUrl(#[from] Box<ParsedUrlError>),
} }
/// A pinned requirement, as extracted from a `requirements.txt` file. /// A pinned requirement, as extracted from a `requirements.txt` file.
@ -33,9 +31,7 @@ impl Preference {
pub fn from_entry(entry: RequirementEntry) -> Result<Self, PreferenceError> { pub fn from_entry(entry: RequirementEntry) -> Result<Self, PreferenceError> {
Ok(Self { Ok(Self {
requirement: match entry.requirement { requirement: match entry.requirement {
RequirementsTxtRequirement::Named(requirement) => { RequirementsTxtRequirement::Named(requirement) => Requirement::from(requirement),
Requirement::from_pep508(requirement)?
}
RequirementsTxtRequirement::Unnamed(requirement) => { RequirementsTxtRequirement::Unnamed(requirement) => {
return Err(PreferenceError::Bare(requirement)); return Err(PreferenceError::Bare(requirement));
} }

View File

@ -1,5 +1,6 @@
use distribution_types::{DistributionMetadata, Name, VerbatimParsedUrl, VersionOrUrlRef}; use distribution_types::{DistributionMetadata, Name, VersionOrUrlRef};
use pep440_rs::Version; use pep440_rs::Version;
use pypi_types::VerbatimParsedUrl;
use uv_normalize::PackageName; use uv_normalize::PackageName;
#[derive(Debug)] #[derive(Debug)]

View File

@ -1,5 +1,5 @@
use distribution_types::VerbatimParsedUrl;
use pep508_rs::MarkerTree; use pep508_rs::MarkerTree;
use pypi_types::VerbatimParsedUrl;
use std::fmt::{Display, Formatter}; use std::fmt::{Display, Formatter};
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;

View File

@ -1,7 +1,7 @@
use url::Url; use url::Url;
use distribution_types::{ParsedGitUrl, ParsedUrl, VerbatimParsedUrl};
use pep508_rs::VerbatimUrl; use pep508_rs::VerbatimUrl;
use pypi_types::{ParsedGitUrl, ParsedUrl, VerbatimParsedUrl};
use uv_distribution::git_url_to_precise; use uv_distribution::git_url_to_precise;
use uv_git::GitReference; use uv_git::GitReference;

View File

@ -7,12 +7,12 @@ use pubgrub::type_aliases::SelectedDependencies;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use distribution_types::{ use distribution_types::{
Dist, DistributionMetadata, Name, ParsedUrlError, Requirement, ResolutionDiagnostic, Dist, DistributionMetadata, Name, Requirement, ResolutionDiagnostic, ResolvedDist, VersionId,
ResolvedDist, VersionId, VersionOrUrlRef, VersionOrUrlRef,
}; };
use pep440_rs::{Version, VersionSpecifier}; use pep440_rs::{Version, VersionSpecifier};
use pep508_rs::MarkerEnvironment; use pep508_rs::MarkerEnvironment;
use pypi_types::Yanked; use pypi_types::{ParsedUrlError, Yanked};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use crate::dependency_provider::UvDependencyProvider; use crate::dependency_provider::UvDependencyProvider;
@ -512,8 +512,8 @@ impl ResolutionGraph {
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<anyhow::Result<_, _>>()?; .collect();
for req in manifest.apply(requirements.iter()) { for req in manifest.apply(requirements.iter()) {
let Some(ref marker_tree) = req.marker else { let Some(ref marker_tree) = req.marker else {
continue; continue;

View File

@ -203,9 +203,10 @@ mod tests {
use anyhow::Result; use anyhow::Result;
use url::Url; use url::Url;
use distribution_types::{ParsedUrl, RequirementSource}; use distribution_types::RequirementSource;
use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifiers}; use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifiers};
use pep508_rs::VerbatimUrl; use pep508_rs::VerbatimUrl;
use pypi_types::ParsedUrl;
use crate::resolver::locals::{iter_locals, Locals}; use crate::resolver::locals::{iter_locals, Locals};

View File

@ -1063,8 +1063,8 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?; .collect();
let dependencies = PubGrubDependencies::from_requirements( let dependencies = PubGrubDependencies::from_requirements(
&requirements, &requirements,
&self.constraints, &self.constraints,
@ -1170,8 +1170,8 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?; .collect();
let dependencies = PubGrubDependencies::from_requirements( let dependencies = PubGrubDependencies::from_requirements(
&requirements, &requirements,
&self.constraints, &self.constraints,

View File

@ -1,11 +1,9 @@
use distribution_types::{RequirementSource, Verbatim};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tracing::debug; use tracing::debug;
use distribution_types::{
ParsedArchiveUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, RequirementSource, Verbatim,
VerbatimParsedUrl,
};
use pep508_rs::{MarkerEnvironment, VerbatimUrl}; use pep508_rs::{MarkerEnvironment, VerbatimUrl};
use pypi_types::{ParsedArchiveUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, VerbatimParsedUrl};
use uv_distribution::is_same_reference; use uv_distribution::is_same_reference;
use uv_git::GitUrl; use uv_git::GitUrl;
use uv_normalize::PackageName; use uv_normalize::PackageName;

View File

@ -167,10 +167,9 @@ macro_rules! assert_snapshot {
#[tokio::test] #[tokio::test]
async fn black() -> Result<()> { async fn black() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(), pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
.build(); .build();
@ -196,10 +195,9 @@ async fn black() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_colorama() -> Result<()> { async fn black_colorama() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black[colorama]<=23.9.1").unwrap(), pep508_rs::Requirement::from_str("black[colorama]<=23.9.1").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
.build(); .build();
@ -228,10 +226,9 @@ async fn black_colorama() -> Result<()> {
/// Resolve Black with an invalid extra. The resolver should ignore the extra. /// Resolve Black with an invalid extra. The resolver should ignore the extra.
#[tokio::test] #[tokio::test]
async fn black_tensorboard() -> Result<()> { async fn black_tensorboard() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black[tensorboard]<=23.9.1").unwrap(), pep508_rs::Requirement::from_str("black[tensorboard]<=23.9.1").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
.build(); .build();
@ -257,10 +254,9 @@ async fn black_tensorboard() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_python_310() -> Result<()> { async fn black_python_310() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(), pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
.build(); .build();
@ -293,14 +289,12 @@ async fn black_python_310() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_mypy_extensions() -> Result<()> { async fn black_mypy_extensions() -> Result<()> {
let manifest = Manifest::new( let manifest = Manifest::new(
vec![ vec![Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("black<=23.9.1").unwrap()) pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(),
.unwrap(), )],
], Constraints::from_requirements(vec![Requirement::from(
Constraints::from_requirements(vec![Requirement::from_pep508(
pep508_rs::Requirement::from_str("mypy-extensions<0.4.4").unwrap(), pep508_rs::Requirement::from_str("mypy-extensions<0.4.4").unwrap(),
) )]),
.unwrap()]),
Overrides::default(), Overrides::default(),
vec![], vec![],
None, None,
@ -336,14 +330,12 @@ async fn black_mypy_extensions() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_mypy_extensions_extra() -> Result<()> { async fn black_mypy_extensions_extra() -> Result<()> {
let manifest = Manifest::new( let manifest = Manifest::new(
vec![ vec![Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("black<=23.9.1").unwrap()) pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(),
.unwrap(), )],
], Constraints::from_requirements(vec![Requirement::from(
Constraints::from_requirements(vec![Requirement::from_pep508(
pep508_rs::Requirement::from_str("mypy-extensions[extra]<0.4.4").unwrap(), pep508_rs::Requirement::from_str("mypy-extensions[extra]<0.4.4").unwrap(),
) )]),
.unwrap()]),
Overrides::default(), Overrides::default(),
vec![], vec![],
None, None,
@ -379,14 +371,12 @@ async fn black_mypy_extensions_extra() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_flake8() -> Result<()> { async fn black_flake8() -> Result<()> {
let manifest = Manifest::new( let manifest = Manifest::new(
vec![ vec![Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("black<=23.9.1").unwrap()) pep508_rs::Requirement::from_str("black<=23.9.1").unwrap(),
.unwrap(), )],
], Constraints::from_requirements(vec![Requirement::from(
Constraints::from_requirements(vec![Requirement::from_pep508(
pep508_rs::Requirement::from_str("flake8<1").unwrap(), pep508_rs::Requirement::from_str("flake8<1").unwrap(),
) )]),
.unwrap()]),
Overrides::default(), Overrides::default(),
vec![], vec![],
None, None,
@ -419,10 +409,9 @@ async fn black_flake8() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_lowest() -> Result<()> { async fn black_lowest() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black>21").unwrap(), pep508_rs::Requirement::from_str("black>21").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.resolution_mode(ResolutionMode::Lowest) .resolution_mode(ResolutionMode::Lowest)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -449,10 +438,9 @@ async fn black_lowest() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_lowest_direct() -> Result<()> { async fn black_lowest_direct() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black>21").unwrap(), pep508_rs::Requirement::from_str("black>21").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.resolution_mode(ResolutionMode::LowestDirect) .resolution_mode(ResolutionMode::LowestDirect)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -480,12 +468,14 @@ async fn black_lowest_direct() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_respect_preference() -> Result<()> { async fn black_respect_preference() -> Result<()> {
let manifest = Manifest::new( let manifest = Manifest::new(
vec![Requirement::from_pep508(pep508_rs::Requirement::from_str("black<=23.9.1")?).unwrap()], vec![Requirement::from(pep508_rs::Requirement::from_str(
"black<=23.9.1",
)?)],
Constraints::default(), Constraints::default(),
Overrides::default(), Overrides::default(),
vec![Preference::from_requirement( vec![Preference::from_requirement(Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("black==23.9.0")?).unwrap(), pep508_rs::Requirement::from_str("black==23.9.0")?,
)], ))],
None, None,
vec![], vec![],
Exclusions::default(), Exclusions::default(),
@ -518,12 +508,14 @@ async fn black_respect_preference() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_ignore_preference() -> Result<()> { async fn black_ignore_preference() -> Result<()> {
let manifest = Manifest::new( let manifest = Manifest::new(
vec![Requirement::from_pep508(pep508_rs::Requirement::from_str("black<=23.9.1")?).unwrap()], vec![Requirement::from(pep508_rs::Requirement::from_str(
"black<=23.9.1",
)?)],
Constraints::default(), Constraints::default(),
Overrides::default(), Overrides::default(),
vec![Preference::from_requirement( vec![Preference::from_requirement(Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("black==23.9.2")?).unwrap(), pep508_rs::Requirement::from_str("black==23.9.2")?,
)], ))],
None, None,
vec![], vec![],
Exclusions::default(), Exclusions::default(),
@ -554,10 +546,9 @@ async fn black_ignore_preference() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_disallow_prerelease() -> Result<()> { async fn black_disallow_prerelease() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black<=20.0").unwrap(), pep508_rs::Requirement::from_str("black<=20.0").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::Disallow) .prerelease_mode(PreReleaseMode::Disallow)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -578,10 +569,9 @@ async fn black_disallow_prerelease() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn black_allow_prerelease_if_necessary() -> Result<()> { async fn black_allow_prerelease_if_necessary() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("black<=20.0").unwrap(), pep508_rs::Requirement::from_str("black<=20.0").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::IfNecessary) .prerelease_mode(PreReleaseMode::IfNecessary)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -602,10 +592,9 @@ async fn black_allow_prerelease_if_necessary() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn pylint_disallow_prerelease() -> Result<()> { async fn pylint_disallow_prerelease() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap(), pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::Disallow) .prerelease_mode(PreReleaseMode::Disallow)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -628,10 +617,9 @@ async fn pylint_disallow_prerelease() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn pylint_allow_prerelease() -> Result<()> { async fn pylint_allow_prerelease() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap(), pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::Allow) .prerelease_mode(PreReleaseMode::Allow)
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
@ -655,10 +643,8 @@ async fn pylint_allow_prerelease() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn pylint_allow_explicit_prerelease_without_marker() -> Result<()> { async fn pylint_allow_explicit_prerelease_without_marker() -> Result<()> {
let manifest = Manifest::simple(vec![ let manifest = Manifest::simple(vec![
Requirement::from_pep508(pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap()) Requirement::from(pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap()),
.unwrap(), Requirement::from(pep508_rs::Requirement::from_str("isort>=5.0.0").unwrap()),
Requirement::from_pep508(pep508_rs::Requirement::from_str("isort>=5.0.0").unwrap())
.unwrap(),
]); ]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::Explicit) .prerelease_mode(PreReleaseMode::Explicit)
@ -683,10 +669,8 @@ async fn pylint_allow_explicit_prerelease_without_marker() -> Result<()> {
#[tokio::test] #[tokio::test]
async fn pylint_allow_explicit_prerelease_with_marker() -> Result<()> { async fn pylint_allow_explicit_prerelease_with_marker() -> Result<()> {
let manifest = Manifest::simple(vec![ let manifest = Manifest::simple(vec![
Requirement::from_pep508(pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap()) Requirement::from(pep508_rs::Requirement::from_str("pylint==2.3.0").unwrap()),
.unwrap(), Requirement::from(pep508_rs::Requirement::from_str("isort>=5.0.0b").unwrap()),
Requirement::from_pep508(pep508_rs::Requirement::from_str("isort>=5.0.0b").unwrap())
.unwrap(),
]); ]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.prerelease_mode(PreReleaseMode::Explicit) .prerelease_mode(PreReleaseMode::Explicit)
@ -712,10 +696,9 @@ async fn pylint_allow_explicit_prerelease_with_marker() -> Result<()> {
/// fail with a pre-release-centric hint. /// fail with a pre-release-centric hint.
#[tokio::test] #[tokio::test]
async fn msgraph_sdk() -> Result<()> { async fn msgraph_sdk() -> Result<()> {
let manifest = Manifest::simple(vec![Requirement::from_pep508( let manifest = Manifest::simple(vec![Requirement::from(
pep508_rs::Requirement::from_str("msgraph-sdk==1.0.0").unwrap(), pep508_rs::Requirement::from_str("msgraph-sdk==1.0.0").unwrap(),
) )]);
.unwrap()]);
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.exclude_newer(Some(*EXCLUDE_NEWER)) .exclude_newer(Some(*EXCLUDE_NEWER))
.build(); .build();

View File

@ -110,7 +110,7 @@ impl HashStrategy {
} }
UnresolvedRequirement::Unnamed(requirement) => { UnresolvedRequirement::Unnamed(requirement) => {
// Direct URLs are always allowed. // Direct URLs are always allowed.
PackageId::from_url(&requirement.url) PackageId::from_url(&requirement.url.verbatim)
} }
}; };

View File

@ -19,6 +19,7 @@ install-wheel-rs = { workspace = true, features = ["clap"], default-features = f
pep440_rs = { workspace = true } pep440_rs = { workspace = true }
pep508_rs = { workspace = true } pep508_rs = { workspace = true }
platform-tags = { workspace = true } platform-tags = { workspace = true }
pypi-types = { workspace = true }
requirements-txt = { workspace = true, features = ["http"] } requirements-txt = { workspace = true, features = ["http"] }
uv-auth = { workspace = true } uv-auth = { workspace = true }
uv-cache = { workspace = true, features = ["clap"] } uv-cache = { workspace = true, features = ["clap"] }

View File

@ -16,8 +16,7 @@ use tempfile::tempdir_in;
use tracing::debug; use tracing::debug;
use distribution_types::{ use distribution_types::{
IndexLocations, LocalEditable, LocalEditables, ParsedUrlError, SourceAnnotation, IndexLocations, LocalEditable, LocalEditables, SourceAnnotation, SourceAnnotations, Verbatim,
SourceAnnotations, Verbatim,
}; };
use distribution_types::{Requirement, Requirements}; use distribution_types::{Requirement, Requirements};
use install_wheel_rs::linker::LinkMode; use install_wheel_rs::linker::LinkMode;
@ -472,17 +471,17 @@ pub(crate) async fn pip_compile(
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?, .collect(),
optional_dependencies: IndexMap::default(), optional_dependencies: IndexMap::default(),
}; };
Ok::<_, Box<ParsedUrlError>>(BuiltEditableMetadata { BuiltEditableMetadata {
built: built_editable.editable, built: built_editable.editable,
metadata: built_editable.metadata, metadata: built_editable.metadata,
requirements, requirements,
}) }
}) })
.collect::<Result<_, _>>()?; .collect();
// Validate that the editables are compatible with the target Python version. // Validate that the editables are compatible with the target Python version.
for editable in &editables { for editable in &editables {

View File

@ -2,12 +2,12 @@ use std::borrow::Cow;
use std::fmt::Write; use std::fmt::Write;
use anstream::eprint; use anstream::eprint;
use distribution_types::{IndexLocations, Resolution};
use fs_err as fs; use fs_err as fs;
use itertools::Itertools; use itertools::Itertools;
use owo_colors::OwoColorize; use owo_colors::OwoColorize;
use tracing::{debug, enabled, Level}; use tracing::{debug, enabled, Level};
use distribution_types::{IndexLocations, Resolution};
use install_wheel_rs::linker::LinkMode; use install_wheel_rs::linker::LinkMode;
use platform_tags::Tags; use platform_tags::Tags;
use uv_auth::store_credentials_from_url; use uv_auth::store_credentials_from_url;

View File

@ -1,5 +1,6 @@
//! Common operations shared across the `pip` API and subcommands. //! Common operations shared across the `pip` API and subcommands.
use pypi_types::{ParsedUrl, ParsedUrlError};
use std::fmt::Write; use std::fmt::Write;
use std::path::PathBuf; use std::path::PathBuf;
@ -14,7 +15,7 @@ use distribution_types::{
}; };
use distribution_types::{ use distribution_types::{
DistributionMetadata, IndexLocations, InstalledMetadata, InstalledVersion, LocalDist, Name, DistributionMetadata, IndexLocations, InstalledMetadata, InstalledVersion, LocalDist, Name,
ParsedUrl, RequirementSource, Resolution, RequirementSource, Resolution,
}; };
use install_wheel_rs::linker::LinkMode; use install_wheel_rs::linker::LinkMode;
use pep440_rs::{VersionSpecifier, VersionSpecifiers}; use pep440_rs::{VersionSpecifier, VersionSpecifiers};
@ -177,7 +178,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
let python_requirement = PythonRequirement::from_marker_environment(interpreter, markers); let python_requirement = PythonRequirement::from_marker_environment(interpreter, markers);
// Map the editables to their metadata. // Map the editables to their metadata.
let editables = editables.as_metadata().map_err(Error::ParsedUrl)?; let editables = editables.as_metadata();
// Determine any lookahead requirements. // Determine any lookahead requirements.
let lookaheads = match options.dependency_mode { let lookaheads = match options.dependency_mode {
@ -769,12 +770,9 @@ pub(crate) enum Error {
#[error(transparent)] #[error(transparent)]
Lookahead(#[from] uv_requirements::LookaheadError), Lookahead(#[from] uv_requirements::LookaheadError),
#[error(transparent)]
ParsedUrl(Box<distribution_types::ParsedUrlError>),
#[error(transparent)] #[error(transparent)]
Anyhow(#[from] anyhow::Error), Anyhow(#[from] anyhow::Error),
#[error("Installed distribution has unsupported type")] #[error("Installed distribution has unsupported type")]
UnsupportedInstalledDist(#[source] Box<distribution_types::ParsedUrlError>), UnsupportedInstalledDist(#[source] Box<ParsedUrlError>),
} }

View File

@ -7,6 +7,7 @@ use tracing::debug;
use distribution_types::{InstalledMetadata, Name, Requirement, UnresolvedRequirement}; use distribution_types::{InstalledMetadata, Name, Requirement, UnresolvedRequirement};
use pep508_rs::UnnamedRequirement; use pep508_rs::UnnamedRequirement;
use pypi_types::VerbatimParsedUrl;
use uv_cache::Cache; use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity}; use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::{KeyringProviderType, PreviewMode}; use uv_configuration::{KeyringProviderType, PreviewMode};
@ -94,7 +95,7 @@ pub(crate) async fn pip_uninstall(
let site_packages = uv_installer::SitePackages::from_executable(&venv)?; let site_packages = uv_installer::SitePackages::from_executable(&venv)?;
// Partition the requirements into named and unnamed requirements. // Partition the requirements into named and unnamed requirements.
let (named, unnamed): (Vec<Requirement>, Vec<UnnamedRequirement>) = spec let (named, unnamed): (Vec<Requirement>, Vec<UnnamedRequirement<VerbatimParsedUrl>>) = spec
.requirements .requirements
.into_iter() .into_iter()
.partition_map(|entry| match entry.requirement { .partition_map(|entry| match entry.requirement {
@ -118,7 +119,7 @@ pub(crate) async fn pip_uninstall(
let urls = { let urls = {
let mut urls = unnamed let mut urls = unnamed
.into_iter() .into_iter()
.map(|requirement| requirement.url.to_url()) .map(|requirement| requirement.url.verbatim.to_url())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
urls.sort_unstable(); urls.sort_unstable();
urls.dedup(); urls.dedup();

View File

@ -225,16 +225,14 @@ async fn venv_impl(
let requirements = if interpreter.python_tuple() < (3, 12) { let requirements = if interpreter.python_tuple() < (3, 12) {
// Only include `setuptools` and `wheel` on Python <3.12 // Only include `setuptools` and `wheel` on Python <3.12
vec![ vec![
Requirement::from_pep508(pep508_rs::Requirement::from_str("pip").unwrap()).unwrap(), Requirement::from(pep508_rs::Requirement::from_str("pip").unwrap()),
Requirement::from_pep508(pep508_rs::Requirement::from_str("setuptools").unwrap()) Requirement::from(pep508_rs::Requirement::from_str("setuptools").unwrap()),
.unwrap(), Requirement::from(pep508_rs::Requirement::from_str("wheel").unwrap()),
Requirement::from_pep508(pep508_rs::Requirement::from_str("wheel").unwrap())
.unwrap(),
] ]
} else { } else {
vec![ vec![Requirement::from(
Requirement::from_pep508(pep508_rs::Requirement::from_str("pip").unwrap()).unwrap(), pep508_rs::Requirement::from_str("pip").unwrap(),
] )]
}; };
// Resolve and install the requirements. // Resolve and install the requirements.

View File

@ -6,7 +6,7 @@ use indexmap::IndexMap;
use owo_colors::OwoColorize; use owo_colors::OwoColorize;
use distribution_types::{ use distribution_types::{
InstalledDist, LocalEditable, LocalEditables, Name, ParsedUrlError, Requirement, Requirements, InstalledDist, LocalEditable, LocalEditables, Name, Requirement, Requirements,
}; };
use platform_tags::Tags; use platform_tags::Tags;
use requirements_txt::EditableRequirement; use requirements_txt::EditableRequirement;
@ -159,7 +159,7 @@ impl ResolvedEditables {
}) })
} }
pub(crate) fn as_metadata(&self) -> Result<Vec<BuiltEditableMetadata>, Box<ParsedUrlError>> { pub(crate) fn as_metadata(&self) -> Vec<BuiltEditableMetadata> {
self.iter() self.iter()
.map(|editable| { .map(|editable| {
let dependencies: Vec<_> = editable let dependencies: Vec<_> = editable
@ -167,16 +167,16 @@ impl ResolvedEditables {
.requires_dist .requires_dist
.iter() .iter()
.cloned() .cloned()
.map(Requirement::from_pep508) .map(Requirement::from)
.collect::<Result<_, _>>()?; .collect();
Ok::<_, Box<ParsedUrlError>>(BuiltEditableMetadata { BuiltEditableMetadata {
built: editable.local().clone(), built: editable.local().clone(),
metadata: editable.metadata().clone(), metadata: editable.metadata().clone(),
requirements: Requirements { requirements: Requirements {
dependencies, dependencies,
optional_dependencies: IndexMap::default(), optional_dependencies: IndexMap::default(),
}, },
}) }
}) })
.collect() .collect()
} }

View File

@ -5458,7 +5458,10 @@ fn unsupported_scheme() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
error: Unsupported URL prefix `bzr` in URL: `bzr+https://example.com/anyio` (Bazaar is not supported) error: Couldn't parse requirement in `requirements.in` at position 0
Caused by: Unsupported URL prefix `bzr` in URL: `bzr+https://example.com/anyio` (Bazaar is not supported)
anyio @ bzr+https://example.com/anyio
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"### "###
); );