Annotate sources of requirements (#3269)

## Summary

Fixes https://github.com/astral-sh/uv/issues/1343: `uv pip compile` output now carries `# via` annotations pointing at the requirements, constraints, overrides, and `pyproject.toml` files that introduced each package. This is a first draft, but it does at least mostly work locally (barring some bits of the test suite that don't seem to run for me in general).

## Test Plan

Mostly running the existing tests and checking that the revised output is sane.

## Outstanding issues

Most of these come down to "AFAIK, the existing tools don't support these patterns, but `uv` does", so I'm not sure there's an existing good answer here! Most of the answers so far are "whatever was easiest to build".

- [x] ~~Is "-r pyproject.toml" correct? Should it show something else or
get skipped entirely~~ No it wasn't. Fixed in
3044fa8b86
- [ ] If the requirements file is stdin, that just gets skipped. Should
it be recorded?
- [ ] Overrides get shown as "--override<override.txt>". Correct?
- [x] ~~Some of the tests (e.g. `dependency_excludes_non_contiguous_range_of_compatible_versions`) make assumptions about the order in which package versions are output, which this PR breaks. I'm not sure whether that text is fairly arbitrary and can be replaced, or whether the behaviour needs fixing.~~ Fixed by removing the custom pubgrub `PartialEq`/`Hash`.
- [ ] Are all the `TrackedFromStr` et al. changes needed, or is there an easier way? I don't think so: I think it's necessary to track this sort of thing fairly comprehensively to make this feature work, and that sort of invasive change feels necessary, but happy to be proved wrong there :)
- [x] ~~If you have a requirement coming in from two or more different requirements files, only one turns up. I've got a closed-source example of this (can go into more detail if needed), mostly consisting of a complicated set of common deps producing a larger set. It's a rarer case, but worth considering.~~ Fixed in 042432b200
- [ ] Doesn't add annotations for `setup.py` yet
  - This is pretty hard, as the correct place to insert the path is `parse_pkg_info` in `crates/pypi-types/src/metadata.rs`, which, because it works from a source distribution, has entirely thrown away such details as "where did this package requirement get built from". Could add "`built package name`" as a dep, but that's a little odd.
Commit bc963d13cb (parent 367958e6b2), authored by Tom Parker-Shemilt on 2024-05-09 04:19:22 +01:00 and committed by GitHub.
42 changed files with 1295 additions and 331 deletions


@ -0,0 +1,52 @@
use serde::{Deserialize, Deserializer, Serialize};
use std::path::PathBuf;
use uv_fs::Simplified;
/// Source of a dependency, e.g., a `-r requirements.txt` file.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub enum SourceAnnotation {
/// A `pyproject.toml` file.
PyProject {
path: PathBuf,
project_name: Option<String>,
},
/// A `-c constraints.txt` file.
Constraint(PathBuf),
/// An `--override overrides.txt` file.
Override(PathBuf),
/// A `-r requirements.txt` file.
Requirement(PathBuf),
}
impl<'de> Deserialize<'de> for SourceAnnotation {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(SourceAnnotation::Requirement(PathBuf::from(s)))
}
}
impl std::fmt::Display for SourceAnnotation {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Requirement(path) => {
write!(f, "-r {}", path.user_display())
}
Self::Constraint(path) => {
write!(f, "-c {}", path.user_display())
}
Self::Override(path) => {
write!(f, "--override {}", path.user_display())
}
Self::PyProject { path, project_name } => {
if let Some(project_name) = project_name {
write!(f, "{} ({})", project_name, path.user_display())
} else {
write!(f, "{}", path.user_display())
}
}
}
}
}
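
For reference, a minimal sketch (not part of the diff) of how these variants render, assuming relative paths that `user_display` passes through unchanged:

```rust
use std::path::PathBuf;

use distribution_types::SourceAnnotation;

fn main() {
    // Each variant renders in the style of the flag that introduced it.
    let requirement = SourceAnnotation::Requirement(PathBuf::from("requirements.in"));
    let constraint = SourceAnnotation::Constraint(PathBuf::from("constraints.txt"));
    let pyproject = SourceAnnotation::PyProject {
        path: PathBuf::from("pyproject.toml"),
        project_name: Some("example".to_string()),
    };

    assert_eq!(requirement.to_string(), "-r requirements.in");
    assert_eq!(constraint.to_string(), "-c constraints.txt");
    assert_eq!(pyproject.to_string(), "example (pyproject.toml)");
}
```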


@ -44,6 +44,7 @@ use pep440_rs::Version;
use pep508_rs::{Pep508Url, Scheme, VerbatimUrl};
use uv_normalize::PackageName;
pub use crate::annotation::*;
pub use crate::any::*;
pub use crate::buildable::*;
pub use crate::cached::*;
@ -62,6 +63,7 @@ pub use crate::resolved::*;
pub use crate::specified_requirement::*;
pub use crate::traits::*;
mod annotation;
mod any;
mod buildable;
mod cached;


@ -28,6 +28,7 @@ pub struct Requirement {
pub extras: Vec<ExtraName>,
pub marker: Option<MarkerTree>,
pub source: RequirementSource,
pub path: Option<PathBuf>,
}
impl Requirement {
@ -62,6 +63,7 @@ impl Requirement {
extras: requirement.extras,
marker: requirement.marker,
source,
path: requirement.path,
})
}
}


@ -139,6 +139,7 @@ impl From<&ResolvedDist> for Requirement {
extras: vec![],
marker: None,
source,
path: None,
}
}
}


@ -14,6 +14,8 @@ pub struct UnresolvedRequirementSpecification {
pub requirement: UnresolvedRequirement,
/// Hashes of the downloadable packages.
pub hashes: Vec<String>,
/// Path of the source of the requirement
pub path: Option<String>,
}
/// A requirement read from a `requirements.txt` or `pyproject.toml` file.


@ -26,7 +26,7 @@ use std::fmt::{Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
#[cfg(feature = "pyo3")]
use std::ops::Deref;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::str::FromStr;
#[cfg(feature = "pyo3")]
@ -148,6 +148,16 @@ pub struct Requirement<T: Pep508Url = VerbatimUrl> {
/// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`.
/// Those are a nested and/or tree.
pub marker: Option<MarkerTree>,
/// The source file containing the requirement.
pub path: Option<PathBuf>,
}
impl Requirement {
/// Set the source file containing the requirement.
#[must_use]
pub fn with_source(self, path: Option<PathBuf>) -> Self {
Self { path, ..self }
}
}
impl<T: Pep508Url + Display> Display for Requirement<T> {
@ -482,6 +492,7 @@ impl<T: Pep508Url> Requirement<T> {
extras,
version_or_url,
marker,
path,
} = self;
Requirement {
name,
@ -494,6 +505,7 @@ impl<T: Pep508Url> Requirement<T> {
Some(VersionOrUrl::Url(url)) => Some(VersionOrUrl::Url(U::from(url))),
},
marker,
path,
}
}
}
@ -1017,6 +1029,7 @@ fn parse_pep508_requirement<T: Pep508Url>(
extras,
version_or_url: requirement_kind,
marker,
path: None,
})
}
@ -1158,6 +1171,7 @@ mod tests {
operator: MarkerOperator::LessThan,
r_value: MarkerValue::QuotedString("2.7".to_string()),
})),
path: None,
};
assert_eq!(requests, expected);
}
@ -1383,6 +1397,7 @@ mod tests {
extras: vec![],
marker: None,
version_or_url: Some(VersionOrUrl::Url(Url::parse(url).unwrap())),
path: None,
};
assert_eq!(pip_url, expected);
}
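
As a rough usage sketch (not part of the diff, and with a hypothetical `requirements.in` path for illustration), the new `with_source` builder can be chained onto a parsed requirement to record where it came from; the `pyproject.toml` lowering later in this diff uses exactly this pattern:

```rust
use std::path::PathBuf;
use std::str::FromStr;

use pep508_rs::Requirement;

fn main() {
    // Parse a PEP 508 requirement, then record the file it was read from.
    let requirement: Requirement =
        Requirement::from_str("flask==3.0.0").expect("valid PEP 508 requirement");
    let requirement = requirement.with_source(Some(PathBuf::from("requirements.in")));
    assert_eq!(requirement.path, Some(PathBuf::from("requirements.in")));
}
```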


@ -1,5 +1,5 @@
use std::fmt::{Display, Formatter};
use std::path::Path;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
@ -30,6 +30,8 @@ pub struct UnnamedRequirement {
/// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`.
/// Those are a nested and/or tree.
pub marker: Option<MarkerTree>,
/// The source file containing the requirement.
pub path: Option<PathBuf>,
}
impl UnnamedRequirement {
@ -41,6 +43,12 @@ impl UnnamedRequirement {
true
}
}
/// Set the source file containing the requirement.
#[must_use]
pub fn with_source(self, path: Option<PathBuf>) -> Self {
Self { path, ..self }
}
}
impl Display for UnnamedRequirement {
@ -159,6 +167,7 @@ fn parse_unnamed_requirement(
url,
extras,
marker,
path: None,
})
}


@ -167,6 +167,9 @@ pub struct EditableRequirement {
pub url: VerbatimUrl,
pub extras: Vec<ExtraName>,
pub path: PathBuf,
/// Path of the original file (where existing)
pub source: Option<PathBuf>,
}
impl EditableRequirement {
@ -191,6 +194,7 @@ impl EditableRequirement {
/// We disallow URLs with schemes other than `file://` (e.g., `https://...`).
pub fn parse(
given: &str,
source: Option<&Path>,
working_dir: impl AsRef<Path>,
) -> Result<Self, RequirementsTxtParserError> {
// Identify the extras.
@ -265,7 +269,12 @@ impl EditableRequirement {
// Add the verbatim representation of the URL to the `VerbatimUrl`.
let url = url.with_given(requirement.to_string());
Ok(Self { url, extras, path })
Ok(Self {
url,
extras,
path,
source: source.map(Path::to_path_buf),
})
}
/// Identify the extras in an editable URL (e.g., `../editable[dev]`).
@ -305,6 +314,8 @@ pub struct RequirementEntry {
pub requirement: RequirementsTxtRequirement,
/// Hashes of the downloadable packages.
pub hashes: Vec<String>,
/// Path of the original file (where existing)
pub path: Option<String>,
}
// We place the impl here instead of next to `UnresolvedRequirementSpecification` because
@ -324,6 +335,7 @@ impl TryFrom<RequirementEntry> for UnresolvedRequirementSpecification {
}
},
hashes: value.hashes,
path: value.path,
})
}
}
@ -402,12 +414,18 @@ impl RequirementsTxt {
})?;
let requirements_dir = requirements_txt.parent().unwrap_or(working_dir);
let data = Self::parse_inner(&content, working_dir, requirements_dir, client_builder)
.await
.map_err(|err| RequirementsTxtFileError {
file: requirements_txt.to_path_buf(),
error: err,
})?;
let data = Self::parse_inner(
&content,
working_dir,
requirements_dir,
client_builder,
requirements_txt,
)
.await
.map_err(|err| RequirementsTxtFileError {
file: requirements_txt.to_path_buf(),
error: err,
})?;
if data == Self::default() {
warn_user!(
"Requirements file {} does not contain any dependencies",
@ -429,11 +447,12 @@ impl RequirementsTxt {
working_dir: &Path,
requirements_dir: &Path,
client_builder: &BaseClientBuilder<'_>,
requirements_txt: &Path,
) -> Result<Self, RequirementsTxtParserError> {
let mut s = Scanner::new(content);
let mut data = Self::default();
while let Some(statement) = parse_entry(&mut s, content, working_dir)? {
while let Some(statement) = parse_entry(&mut s, content, working_dir, requirements_txt)? {
match statement {
RequirementsTxtStatement::Requirements {
filename,
@ -511,7 +530,9 @@ impl RequirementsTxt {
}
}
}
data.constraints.extend(sub_constraints.constraints);
for constraint in sub_constraints.constraints {
data.constraints.push(constraint);
}
}
RequirementsTxtStatement::RequirementEntry(requirement_entry) => {
data.requirements.push(requirement_entry);
@ -585,6 +606,7 @@ fn parse_entry(
s: &mut Scanner,
content: &str,
working_dir: &Path,
requirements_txt: &Path,
) -> Result<Option<RequirementsTxtStatement>, RequirementsTxtParserError> {
// Eat all preceding whitespace, this may run us to the end of file
eat_wrappable_whitespace(s);
@ -613,8 +635,9 @@ fn parse_entry(
}
} else if s.eat_if("-e") || s.eat_if("--editable") {
let path_or_url = parse_value(content, s, |c: char| !['\n', '\r', '#'].contains(&c))?;
let editable_requirement = EditableRequirement::parse(path_or_url, working_dir)
.map_err(|err| err.with_offset(start))?;
let editable_requirement =
EditableRequirement::parse(path_or_url, Some(requirements_txt), working_dir)
.map_err(|err| err.with_offset(start))?;
RequirementsTxtStatement::EditableRequirement(editable_requirement)
} else if s.eat_if("-i") || s.eat_if("--index-url") {
let given = parse_value(content, s, |c: char| !['\n', '\r', '#'].contains(&c))?;
@ -676,10 +699,17 @@ fn parse_entry(
})?;
RequirementsTxtStatement::OnlyBinary(NoBuild::from_arg(specifier))
} else if s.at(char::is_ascii_alphanumeric) || s.at(|char| matches!(char, '.' | '/' | '$')) {
let (requirement, hashes) = parse_requirement_and_hashes(s, content, working_dir)?;
let source = if requirements_txt == Path::new("-") {
None
} else {
Some(requirements_txt)
};
let (requirement, hashes) = parse_requirement_and_hashes(s, content, source, working_dir)?;
RequirementsTxtStatement::RequirementEntry(RequirementEntry {
requirement,
hashes,
path: requirements_txt.to_str().map(ToString::to_string),
})
} else if let Some(char) = s.peek() {
let (line, column) = calculate_row_column(content, s.cursor());
@ -738,6 +768,7 @@ fn eat_trailing_line(content: &str, s: &mut Scanner) -> Result<(), RequirementsT
fn parse_requirement_and_hashes(
s: &mut Scanner,
content: &str,
source: Option<&Path>,
working_dir: &Path,
) -> Result<(RequirementsTxtRequirement, Vec<String>), RequirementsTxtParserError> {
// PEP 508 requirement
@ -795,8 +826,9 @@ fn parse_requirement_and_hashes(
}
}
let requirement =
RequirementsTxtRequirement::parse(requirement, working_dir).map_err(|err| match err {
let requirement = RequirementsTxtRequirement::parse(requirement, working_dir)
.map(|requirement| requirement.with_source(source.map(Path::to_path_buf)))
.map_err(|err| match err {
RequirementsTxtRequirementError::ParsedUrl(err) => {
RequirementsTxtParserError::ParsedUrl {
source: err,
@ -1332,7 +1364,11 @@ mod test {
use crate::{calculate_row_column, EditableRequirement, RequirementsTxt};
fn workspace_test_data_dir() -> PathBuf {
PathBuf::from("./test-data").canonicalize().unwrap()
Path::new("./test-data").simple_canonicalize().unwrap()
}
fn safe_filter_path(path: &Path) -> String {
regex::escape(&path.simplified_display().to_string()).replace(r"\\", r"(\\\\|/)")
}
#[test_case(Path::new("basic.txt"))]
@ -1350,13 +1386,23 @@ mod test {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual =
RequirementsTxt::parse(requirements_txt, &working_dir, &BaseClientBuilder::new())
.await
.unwrap();
let actual = RequirementsTxt::parse(
requirements_txt.clone(),
&working_dir,
&BaseClientBuilder::new(),
)
.await
.unwrap();
let snapshot = format!("parse-{}", path.to_string_lossy());
insta::assert_debug_snapshot!(snapshot, actual);
let filter_path = safe_filter_path(&working_dir);
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(snapshot, actual);
});
}
#[test_case(Path::new("basic.txt"))]
@ -1401,7 +1447,14 @@ mod test {
.unwrap();
let snapshot = format!("line-endings-{}", path.to_string_lossy());
insta::assert_debug_snapshot!(snapshot, actual);
let filter_path = safe_filter_path(temp_dir.path());
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(snapshot, actual);
});
}
#[cfg(unix)]
@ -1439,13 +1492,8 @@ mod test {
.unwrap();
let snapshot = format!("parse-windows-{}", path.to_string_lossy());
let pattern = regex::escape(
&working_dir
.simplified_display()
.to_string()
.replace('\\', "/"),
);
let filters = vec![(pattern.as_str(), "[WORKSPACE_DIR]")];
let filter_path = safe_filter_path(&working_dir);
let filters = vec![(filter_path.as_str(), "[WORKSPACE_DIR]"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters
}, {
@ -1764,33 +1812,46 @@ mod test {
)
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: None,
marker: None,
},
),
hashes: [],
},
],
constraints: [],
editables: [],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: None,
only_binary: None,
}
"###);
let filter_path = safe_filter_path(temp_dir.path());
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/subdir/sibling.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/subdir/sibling.txt",
),
},
],
constraints: [],
editables: [],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: None,
only_binary: None,
}
"###);
});
Ok(())
}
@ -1818,39 +1879,52 @@ mod test {
)
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: None,
marker: None,
},
),
hashes: [],
},
],
constraints: [],
editables: [],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: Packages(
[
PackageName(
"flask",
),
let filter_path = safe_filter_path(temp_dir.path());
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
],
),
only_binary: None,
}
"###);
constraints: [],
editables: [],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: Packages(
[
PackageName(
"flask",
),
],
),
only_binary: None,
}
"###);
});
Ok(())
}
@ -1884,40 +1958,49 @@ mod test {
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [],
constraints: [],
editables: [
EditableRequirement {
url: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/foo/bar",
query: None,
fragment: None,
let filter_path = safe_filter_path(temp_dir.path());
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [],
constraints: [],
editables: [
EditableRequirement {
url: VerbatimUrl {
url: Url {
scheme: "file",
cannot_be_a_base: false,
username: "",
password: None,
host: None,
port: None,
path: "/foo/bar",
query: None,
fragment: None,
},
given: Some(
"/foo/bar",
),
},
given: Some(
"/foo/bar",
extras: [],
path: "/foo/bar",
source: Some(
"<REQUIREMENTS_DIR>/grandchild.txt",
),
},
extras: [],
path: "/foo/bar",
},
],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: true,
no_binary: None,
only_binary: None,
}
"###);
],
index_url: None,
extra_index_urls: [],
find_links: [],
no_index: true,
no_binary: None,
only_binary: None,
}
"###);
});
Ok(())
}
@ -2000,154 +2083,190 @@ mod test {
)
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"httpx",
),
extras: [],
version_or_url: None,
marker: None,
},
),
hashes: [],
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "3.0.0",
},
],
),
let filter_path = safe_filter_path(temp_dir.path());
let filters = vec![(filter_path.as_str(), "<REQUIREMENTS_DIR>"), (r"\\\\", "/")];
insta::with_settings!({
filters => filters,
}, {
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"httpx",
),
),
marker: None,
},
),
hashes: [
"sha256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
],
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"requests",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "2.26.0",
},
],
),
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/./sibling.txt",
),
),
marker: None,
},
),
hashes: [
"sha256:fedcba0987654321fedcba0987654321fedcba0987654321fedcba0987654321",
],
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"black",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "21.12b0",
},
],
),
),
),
marker: None,
},
),
hashes: [],
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"mypy",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "0.910",
},
],
),
),
),
marker: None,
},
),
hashes: [],
},
],
constraints: [],
editables: [],
index_url: Some(
VerbatimUrl {
url: Url {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"test.pypi.org",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/./sibling.txt",
),
port: None,
path: "/simple/",
query: None,
fragment: None,
},
given: Some(
"https://test.pypi.org/simple/",
),
},
),
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: All,
only_binary: None,
}
"###);
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"flask",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "3.0.0",
},
],
),
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
),
hashes: [
"sha256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
],
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"requests",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "2.26.0",
},
],
),
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
),
hashes: [
"sha256:fedcba0987654321fedcba0987654321fedcba0987654321fedcba0987654321",
],
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"black",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "21.12b0",
},
],
),
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
RequirementEntry {
requirement: Named(
Requirement {
name: PackageName(
"mypy",
),
extras: [],
version_or_url: Some(
VersionSpecifier(
VersionSpecifiers(
[
VersionSpecifier {
operator: Equal,
version: "0.910",
},
],
),
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/requirements.txt",
),
},
],
constraints: [],
editables: [],
index_url: Some(
VerbatimUrl {
url: Url {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"test.pypi.org",
),
),
port: None,
path: "/simple/",
query: None,
fragment: None,
},
given: Some(
"https://test.pypi.org/simple/",
),
},
),
extra_index_urls: [],
find_links: [],
no_index: false,
no_binary: All,
only_binary: None,
}
"###);
});
Ok(())
}
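
A rough sketch (not part of the diff) of how the recorded source path surfaces after parsing; it assumes an async runtime (e.g. tokio) and that `BaseClientBuilder` comes from the `uv_client` crate:

```rust
use std::path::{Path, PathBuf};

use requirements_txt::RequirementsTxt;
use uv_client::BaseClientBuilder;

#[tokio::main]
async fn main() {
    // Parse a local requirements file; each entry now records the file it came from.
    let requirements = RequirementsTxt::parse(
        PathBuf::from("requirements.in"),
        Path::new("."),
        &BaseClientBuilder::new(),
    )
    .await
    .expect("failed to parse requirements.in");

    for entry in &requirements.requirements {
        println!("{:?} came from {:?}", entry.requirement, entry.path);
    }
}
```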


@ -1,4 +1,4 @@
use std::path::Path;
use std::path::{Path, PathBuf};
use thiserror::Error;
@ -18,6 +18,17 @@ pub enum RequirementsTxtRequirement {
Unnamed(UnnamedRequirement),
}
impl RequirementsTxtRequirement {
/// Set the source file containing the requirement.
#[must_use]
pub fn with_source(self, path: Option<PathBuf>) -> Self {
match self {
Self::Named(requirement) => Self::Named(requirement.with_source(path)),
Self::Unnamed(requirement) => Self::Unnamed(requirement.with_source(path)),
}
}
}
#[derive(Debug, Error)]
pub enum RequirementsTxtRequirementError {
#[error(transparent)]


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -72,9 +84,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -96,9 +114,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -120,9 +144,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -144,9 +174,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-a.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-a.txt",
),
},
],
constraints: [
@ -48,6 +54,9 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
Requirement {
name: PackageName(
@ -67,6 +76,9 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
],
editables: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/editable.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/editable.txt",
),
},
RequirementEntry {
requirement: Named(
@ -53,9 +59,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/editable.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/editable.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -61,9 +73,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -93,9 +111,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
RequirementEntry {
requirement: Named(
@ -37,9 +43,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-a.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-a.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
],
constraints: [],


@ -51,11 +51,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -104,11 +110,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -168,11 +180,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -221,12 +239,18 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5",
"sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -275,6 +299,9 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
@ -283,6 +310,9 @@ RequirementsTxt {
"sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e",
"sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
RequirementEntry {
requirement: Named(
@ -53,9 +59,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -72,9 +84,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -96,9 +114,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -120,9 +144,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
RequirementEntry {
requirement: Named(
@ -144,9 +174,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/basic.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-a.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-a.txt",
),
},
],
constraints: [
@ -48,6 +54,9 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
Requirement {
name: PackageName(
@ -67,6 +76,9 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
],
editables: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/constraints-b.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -61,9 +73,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
RequirementEntry {
requirement: Named(
@ -93,9 +111,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/for-poetry.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
RequirementEntry {
requirement: Named(
@ -37,9 +43,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-a.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-a.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/include-b.txt",
),
},
],
constraints: [],


@ -51,11 +51,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -104,11 +110,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -168,11 +180,17 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -221,12 +239,18 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
"sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5",
"sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
RequirementEntry {
requirement: Named(
@ -275,6 +299,9 @@ RequirementsTxt {
],
),
),
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
),
hashes: [
@ -283,6 +310,9 @@ RequirementsTxt {
"sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e",
"sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a",
],
path: Some(
"<REQUIREMENTS_DIR>/poetry-with-hashes.txt",
),
},
],
constraints: [],


@ -24,9 +24,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
RequirementEntry {
requirement: Named(
@ -48,9 +54,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/small.txt",
),
},
],
constraints: [],


@ -25,9 +25,15 @@ RequirementsTxt {
},
extras: [],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
RequirementEntry {
requirement: Unnamed(
@ -54,9 +60,15 @@ RequirementsTxt {
),
],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
RequirementEntry {
requirement: Unnamed(
@ -79,9 +91,15 @@ RequirementsTxt {
},
extras: [],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
],
constraints: [],


@ -13,9 +13,15 @@ RequirementsTxt {
extras: [],
version_or_url: None,
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
RequirementEntry {
requirement: Named(
@ -53,9 +59,15 @@ RequirementsTxt {
),
),
marker: None,
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
),
hashes: [],
path: Some(
"<REQUIREMENTS_DIR>/whitespace.txt",
),
},
],
constraints: [],


@ -25,9 +25,15 @@ RequirementsTxt {
},
extras: [],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
RequirementEntry {
requirement: Unnamed(
@ -54,9 +60,15 @@ RequirementsTxt {
),
],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
RequirementEntry {
requirement: Unnamed(
@ -79,9 +91,15 @@ RequirementsTxt {
},
extras: [],
marker: None,
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
),
hashes: [],
path: Some(
"[WORKSPACE_DIR]/bare-url.txt",
),
},
],
constraints: [],


@ -132,6 +132,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
extras: requirement.extras,
version_or_url: Some(equals_version),
marker: None,
path: requirement.path,
}
} else {
requirement


@ -3,11 +3,16 @@ use std::path::{Component, Path, PathBuf};
use once_cell::sync::Lazy;
pub static CWD: Lazy<PathBuf> = Lazy::new(|| {
/// The current working directory.
pub static CWD: Lazy<PathBuf> =
Lazy::new(|| std::env::current_dir().expect("The current directory must exist"));
/// The current working directory, canonicalized.
pub static CANONICAL_CWD: Lazy<PathBuf> = Lazy::new(|| {
std::env::current_dir()
.unwrap()
.canonicalize()
.expect("The current directory must exist")
.canonicalize()
.expect("The current directory must be canonicalized")
});
pub trait Simplified {
@ -22,6 +27,9 @@ pub trait Simplified {
/// equivalent to [`std::path::Display`].
fn simplified_display(&self) -> std::path::Display;
/// Canonicalize a path without a `\\?\` prefix on Windows.
fn simple_canonicalize(&self) -> std::io::Result<PathBuf>;
/// Render a [`Path`] for user-facing display.
///
/// Like [`simplified_display`], but relativizes the path against the current working directory.
@ -37,10 +45,20 @@ impl<T: AsRef<Path>> Simplified for T {
dunce::simplified(self.as_ref()).display()
}
fn simple_canonicalize(&self) -> std::io::Result<PathBuf> {
dunce::canonicalize(self.as_ref())
}
fn user_display(&self) -> std::path::Display {
let path = dunce::simplified(self.as_ref());
// Attempt to strip the current working directory, then the canonicalized current working
// directory, in case they differ.
path.strip_prefix(CWD.simplified())
.unwrap_or(path)
.unwrap_or_else(|_| {
path.strip_prefix(CANONICAL_CWD.simplified())
.unwrap_or(path)
})
.display()
}
}
@ -136,7 +154,7 @@ pub fn normalize_path(path: &Path) -> Result<PathBuf, std::io::Error> {
pub fn absolutize_path(path: &Path) -> Result<Cow<Path>, std::io::Error> {
use path_absolutize::Absolutize;
path.absolutize_from(&*CWD)
path.absolutize_from(CWD.simplified())
}
/// Like `fs_err::canonicalize`, but with permissive failures on Windows.
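
A small sketch (not part of the diff) of the behaviour the annotations rely on: `user_display` strips the current working directory prefix, whether or not that directory has been canonicalized:

```rust
use uv_fs::Simplified;

fn main() {
    // A path under the current working directory displays relative to it.
    let path = std::env::current_dir()
        .expect("current directory must exist")
        .join("requirements.in");
    println!("{}", path.user_display()); // prints `requirements.in`
}
```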


@ -342,6 +342,7 @@ impl<'a> SitePackages<'a> {
Requirement::from_pep508(dependency)?,
),
hashes: vec![],
path: None,
};
if seen.insert(dependency.clone()) {
stack.push(dependency);
@ -406,6 +407,7 @@ impl<'a> SitePackages<'a> {
Requirement::from_pep508(dependency)?,
),
hashes: vec![],
path: None,
};
if seen.insert(dependency.clone()) {
stack.push(dependency);


@ -238,6 +238,7 @@ impl Pep621Metadata {
pub(crate) fn try_from(
pyproject: PyProjectToml,
extras: &ExtrasSpecification,
pyproject_path: &Path,
project_dir: &Path,
workspace_sources: &HashMap<PackageName, Source>,
workspace_packages: &HashMap<PackageName, String>,
@ -281,6 +282,7 @@ impl Pep621Metadata {
let requirements = lower_requirements(
&project.dependencies.unwrap_or_default(),
&project.optional_dependencies.unwrap_or_default(),
pyproject_path,
&project.name,
project_dir,
&project_sources.unwrap_or_default(),
@ -320,6 +322,7 @@ impl Pep621Metadata {
pub(crate) fn lower_requirements(
dependencies: &[String],
optional_dependencies: &IndexMap<ExtraName, Vec<String>>,
pyproject_path: &Path,
project_name: &PackageName,
project_dir: &Path,
project_sources: &HashMap<PackageName, Source>,
@ -330,7 +333,8 @@ pub(crate) fn lower_requirements(
let dependencies = dependencies
.iter()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?;
let requirement = pep508_rs::Requirement::from_str(dependency)?
.with_source(Some(pyproject_path.to_path_buf()));
let name = requirement.name.clone();
lower_requirement(
requirement,
@ -350,7 +354,8 @@ pub(crate) fn lower_requirements(
let dependencies: Vec<_> = dependencies
.iter()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?;
let requirement = pep508_rs::Requirement::from_str(dependency)?
.with_source(Some(pyproject_path.to_path_buf()));
let name = requirement.name.clone();
lower_requirement(
requirement,
@ -532,6 +537,7 @@ pub(crate) fn lower_requirement(
extras: requirement.extras,
marker: requirement.marker,
source,
path: Some(project_dir.join("pyproject.toml")),
})
}


@ -71,6 +71,7 @@ impl RequirementsSpecification {
RequirementEntry {
requirement,
hashes: vec![],
path: None,
},
)?],
constraints: vec![],
@ -87,7 +88,7 @@ impl RequirementsSpecification {
}
}
RequirementsSource::Editable(name) => {
let requirement = EditableRequirement::parse(name, std::env::current_dir()?)
let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?)
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
@ -145,10 +146,6 @@ impl RequirementsSpecification {
}
RequirementsSource::PyprojectToml(path) => {
let contents = uv_fs::read_to_string(&path).await?;
// We need use this path as base for the relative paths inside pyproject.toml, so
// we need the absolute path instead of a potentially relative path. E.g. with
// `foo = { path = "../foo" }`, we will join `../foo` onto this path.
let path = uv_fs::absolutize_path(path)?;
Self::parse_direct_pyproject_toml(&contents, extras, path.as_ref(), preview)
.with_context(|| format!("Failed to parse `{}`", path.user_display()))?
}
@ -187,19 +184,25 @@ impl RequirementsSpecification {
pub(crate) fn parse_direct_pyproject_toml(
contents: &str,
extras: &ExtrasSpecification,
path: &Path,
pyproject_path: &Path,
preview: PreviewMode,
) -> Result<Self> {
let pyproject = toml::from_str::<PyProjectToml>(contents)?;
// We need use this path as base for the relative paths inside pyproject.toml, so
// we need the absolute path instead of a potentially relative path. E.g. with
// `foo = { path = "../foo" }`, we will join `../foo` onto this path.
let absolute_path = uv_fs::absolutize_path(pyproject_path)?;
let project_dir = absolute_path
.parent()
.context("`pyproject.toml` has no parent directory")?;
let workspace_sources = HashMap::default();
let workspace_packages = HashMap::default();
let project_dir = path
.parent()
.context("pyproject.toml has no parent directory")?;
match Pep621Metadata::try_from(
pyproject,
extras,
pyproject_path,
project_dir,
&workspace_sources,
&workspace_packages,
@ -221,11 +224,13 @@ impl RequirementsSpecification {
url,
path,
extras: requirement.extras,
source: Some(pyproject_path.to_path_buf()),
})
} else {
Either::Right(UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named(requirement),
hashes: vec![],
path: Some(pyproject_path.to_string_lossy().to_string()),
})
}
});
@ -239,8 +244,11 @@ impl RequirementsSpecification {
})
}
Ok(None) => {
debug!("Dynamic pyproject.toml at: `{}`", path.user_display());
let path = fs_err::canonicalize(path)?;
debug!(
"Dynamic pyproject.toml at: `{}`",
pyproject_path.user_display()
);
let path = fs_err::canonicalize(pyproject_path)?;
let source_tree = path.parent().ok_or_else(|| {
anyhow::anyhow!(
"The file `{}` appears to be a `pyproject.toml` file, which must be in a directory",


@ -101,6 +101,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: requirement.path,
});
}
@ -119,6 +120,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: requirement.path,
});
}
@ -146,11 +148,13 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: requirement.path,
});
}
// Attempt to read a `pyproject.toml` file.
if let Some(pyproject) = fs_err::read_to_string(path.join("pyproject.toml"))
let project_path = path.join("pyproject.toml");
if let Some(pyproject) = fs_err::read_to_string(&project_path)
.ok()
.and_then(|contents| toml::from_str::<PyProjectToml>(&contents).ok())
{
@ -166,6 +170,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: Some(project_path.clone()),
});
}
@ -183,6 +188,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: Some(project_path.clone()),
});
}
}
@ -211,6 +217,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: requirement.path,
});
}
}
@ -269,6 +276,7 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
path: requirement.path,
})
}
}


@ -1,4 +1,5 @@
use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet};
use std::hash::BuildHasherDefault;
use std::rc::Rc;
@ -14,7 +15,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use distribution_types::{
Dist, DistributionMetadata, IndexUrl, LocalEditable, Name, ParsedUrlError, Requirement,
ResolvedDist, Verbatim, VersionId, VersionOrUrlRef,
ResolvedDist, SourceAnnotation, Verbatim, VersionId, VersionOrUrlRef,
};
use once_map::OnceMap;
use pep440_rs::Version;
@ -543,12 +544,15 @@ pub struct DisplayResolutionGraph<'a> {
/// The style of annotation comments, used to indicate the dependencies that requested each
/// package.
annotation_style: AnnotationStyle,
/// External sources for each package: requirements, constraints, and overrides.
sources: BTreeMap<String, BTreeSet<SourceAnnotation>>,
}
impl<'a> From<&'a ResolutionGraph> for DisplayResolutionGraph<'a> {
fn from(resolution: &'a ResolutionGraph) -> Self {
Self::new(
resolution,
BTreeMap::default(),
&[],
false,
false,
@ -561,9 +565,10 @@ impl<'a> From<&'a ResolutionGraph> for DisplayResolutionGraph<'a> {
impl<'a> DisplayResolutionGraph<'a> {
/// Create a new [`DisplayResolutionGraph`] for the given graph.
#[allow(clippy::fn_params_excessive_bools)]
#[allow(clippy::fn_params_excessive_bools, clippy::too_many_arguments)]
pub fn new(
underlying: &'a ResolutionGraph,
sources: BTreeMap<String, BTreeSet<SourceAnnotation>>,
no_emit_packages: &'a [PackageName],
show_hashes: bool,
include_extras: bool,
@ -579,6 +584,7 @@ impl<'a> DisplayResolutionGraph<'a> {
include_annotations,
include_index_annotation,
annotation_style,
sources,
}
}
}
@ -719,13 +725,24 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
.collect::<Vec<_>>();
edges.sort_unstable_by_key(|package| package.name());
// Include all external sources (e.g., requirements files).
let source_name: String = match node {
Node::Editable(_package_name, local_editable) => {
local_editable.url.given().unwrap_or_default().to_string()
}
Node::Distribution(name, _, _) => name.to_string(),
};
let source = self.sources.get(&source_name).cloned().unwrap_or_default();
match self.annotation_style {
AnnotationStyle::Line => {
if !edges.is_empty() {
let separator = if has_hashes { "\n " } else { " " };
let deps = edges
.into_iter()
.map(|dependency| dependency.name().to_string())
.map(|dependency| format!("{}", dependency.name()))
.chain(source.into_iter().map(|source| source.to_string()))
.collect::<Vec<_>>()
.join(", ");
let comment = format!("# via {deps}").green().to_string();
@ -733,17 +750,30 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
}
}
AnnotationStyle::Split => match edges.as_slice() {
[] => {}
[edge] => {
[] if source.is_empty() => {}
[] if source.len() == 1 => {
let separator = "\n";
let comment = format!(" # via {}", source.iter().next().unwrap())
.green()
.to_string();
annotation = Some((separator, comment));
}
[edge] if source.is_empty() => {
let separator = "\n";
let comment = format!(" # via {}", edge.name()).green().to_string();
annotation = Some((separator, comment));
}
edges => {
let separator = "\n";
let deps = edges
.iter()
.map(|dependency| format!(" # {}", dependency.name()))
let deps = source
.into_iter()
.map(|source| source.to_string())
.chain(
edges
.iter()
.map(|dependency| format!("{}", dependency.name())),
)
.map(|name| format!(" # {name}"))
.collect::<Vec<_>>()
.join("\n");
let comment = format!(" # via\n{deps}").green().to_string();


@ -1,5 +1,5 @@
use indexmap::IndexMap;
use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet};
use std::env;
use std::fmt::Write;
use std::io::stdout;
@ -10,15 +10,17 @@ use std::str::FromStr;
use anstream::{eprint, AutoStream, StripStream};
use anyhow::{anyhow, Context, Result};
use fs_err as fs;
use indexmap::IndexMap;
use itertools::Itertools;
use owo_colors::OwoColorize;
use tempfile::tempdir_in;
use tracing::debug;
use distribution_types::{IndexLocations, LocalEditable, LocalEditables, ParsedUrlError, Verbatim};
use distribution_types::{
IndexLocations, LocalEditable, LocalEditables, ParsedUrlError, SourceAnnotation, Verbatim,
};
use distribution_types::{Requirement, Requirements};
use install_wheel_rs::linker::LinkMode;
use platform_tags::Tags;
use pypi_types::Metadata23;
use requirements_txt::EditableRequirement;
@ -352,6 +354,65 @@ pub(crate) async fn pip_compile(
.resolve()
.await?;
let mut sources: BTreeMap<String, BTreeSet<SourceAnnotation>> = BTreeMap::new();
for requirement in &requirements {
if let Some(path) = &requirement.path {
if path.ends_with("pyproject.toml") {
sources
.entry(requirement.name.to_string())
.or_default()
.insert(SourceAnnotation::PyProject {
path: path.clone(),
project_name: project.as_ref().map(ToString::to_string),
});
} else {
sources
.entry(requirement.name.to_string())
.or_default()
.insert(SourceAnnotation::Requirement(path.clone()));
}
}
}
for requirement in &constraints {
if let Some(path) = &requirement.path {
sources
.entry(requirement.name.to_string())
.or_default()
.insert(SourceAnnotation::Constraint(path.clone()));
}
}
for requirement in &overrides {
if let Some(path) = &requirement.path {
sources
.entry(requirement.name.to_string())
.or_default()
.insert(SourceAnnotation::Override(path.clone()));
}
}
for editable in &editables {
let package_name = editable.url.given().unwrap_or_default().to_string();
if let Some(source) = &editable.source {
if source.ends_with("pyproject.toml") {
sources
.entry(package_name)
.or_default()
.insert(SourceAnnotation::PyProject {
path: source.clone(),
project_name: project.as_ref().map(ToString::to_string),
});
} else {
sources
.entry(package_name)
.or_default()
.insert(SourceAnnotation::Requirement(source.clone()));
}
}
}
// Collect constraints and overrides.
let constraints = Constraints::from_requirements(constraints);
let overrides = Overrides::from_requirements(overrides);
@ -363,7 +424,12 @@ pub(crate) async fn pip_compile(
let start = std::time::Instant::now();
let editables = LocalEditables::from_editables(editables.into_iter().map(|editable| {
let EditableRequirement { url, extras, path } = editable;
let EditableRequirement {
url,
extras,
path,
source: _,
} = editable;
LocalEditable { url, path, extras }
}));
@ -588,6 +654,7 @@ pub(crate) async fn pip_compile(
"{}",
DisplayResolutionGraph::new(
&resolution,
sources,
&no_emit_packages,
generate_hashes,
include_extras,


@ -575,7 +575,12 @@ async fn build_editables(
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
let editables = LocalEditables::from_editables(editables.iter().map(|editable| {
let EditableRequirement { url, extras, path } = editable;
let EditableRequirement {
url,
extras,
path,
source: _,
} = editable;
LocalEditable {
url: url.clone(),
extras: extras.clone(),
@ -673,6 +678,7 @@ async fn resolve(
extras: vec![],
marker: None,
source,
path: None,
};
Ok(Preference::from_requirement(requirement))
})


@ -684,7 +684,12 @@ async fn resolve_editables(
.with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64));
let editables = LocalEditables::from_editables(uninstalled.iter().map(|editable| {
let EditableRequirement { url, path, extras } = editable;
let EditableRequirement {
url,
path,
extras,
source: _,
} = editable;
LocalEditable {
url: url.clone(),
path: path.clone(),

File diff suppressed because it is too large.


@ -82,6 +82,7 @@ fn incompatible_python_compatible_override() -> Result<()> {
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.11 is not available; 3.9.[X] will be used to build dependencies instead.
@ -240,6 +241,7 @@ fn incompatible_python_compatible_override_available_no_wheels() -> Result<()> {
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
@ -456,6 +458,7 @@ fn python_patch_override_patch_compatible() -> Result<()> {
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.8.0
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.8.0 is not available; 3.8.18 will be used to build dependencies instead.