# Move requirements resolution into its own crate (#2579)

## Summary

No functional changes, but a lot of core logic was living in the `uv`
crate, which is mostly meant to be the CLI.
Charlie Marsh committed 2024-03-21 09:52:47 -04:00 (committed by GitHub)
parent 2979918320
commit f91ce521c5
17 changed files with 914 additions and 66 deletions

### Cargo.lock (generated)

@@ -4355,15 +4355,10 @@ dependencies = [
"chrono",
"clap",
"clap_complete_command",
"configparser",
"console",
"ctrlc",
"distribution-filename",
"distribution-types",
"filetime",
"flate2",
"fs-err",
"futures",
"indexmap 2.2.5",
"indicatif",
"indoc",
@@ -4401,11 +4396,11 @@ dependencies = [
"uv-cache",
"uv-client",
"uv-dispatch",
"uv-distribution",
"uv-fs",
"uv-installer",
"uv-interpreter",
"uv-normalize",
"uv-requirements",
"uv-resolver",
"uv-traits",
"uv-virtualenv",
@@ -4769,6 +4764,44 @@ dependencies = [
"serde",
]
[[package]]
name = "uv-requirements"
version = "0.1.0"
dependencies = [
"anyhow",
"configparser",
"console",
"ctrlc",
"distribution-filename",
"distribution-types",
"fs-err",
"futures",
"indexmap 2.2.5",
"itertools 0.12.1",
"once_cell",
"pep508_rs",
"pypi-types",
"pyproject-toml",
"regex",
"requirements-txt",
"rustc-hash",
"serde",
"serde_json",
"tempfile",
"textwrap",
"thiserror",
"tokio",
"toml",
"tracing",
"uv-cache",
"uv-client",
"uv-distribution",
"uv-fs",
"uv-normalize",
"uv-resolver",
"uv-warnings",
]
[[package]]
name = "uv-resolver"
version = "0.0.1"

### Cargo.toml

@@ -41,6 +41,7 @@ uv-git = { path = "crates/uv-git" }
uv-installer = { path = "crates/uv-installer" }
uv-interpreter = { path = "crates/uv-interpreter" }
uv-normalize = { path = "crates/uv-normalize" }
uv-requirements = { path = "crates/uv-requirements" }
uv-resolver = { path = "crates/uv-resolver" }
uv-traits = { path = "crates/uv-traits" }
uv-trampoline = { path = "crates/uv-trampoline" }

### crates/README.md

@@ -101,6 +101,10 @@ Normalize package and extra names as per Python specifications.
Types and functionality for working with Python packages, e.g., parsing wheel files.
## [uv-requirements](./uv-requirements)
Utilities for reading package requirements from `pyproject.toml` and `requirements.txt` files.
## [uv-resolver](./uv-resolver)
Functionality for resolving Python packages and their dependencies.

### crates/uv-requirements/Cargo.toml

@@ -0,0 +1,48 @@
[package]
name = "uv-requirements"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
[dependencies]
distribution-filename = { workspace = true }
distribution-types = { workspace = true }
pep508_rs = { workspace = true }
pypi-types = { workspace = true }
requirements-txt = { workspace = true, features = ["reqwest"] }
uv-cache = { workspace = true, features = ["clap"] }
uv-client = { workspace = true }
uv-distribution = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-resolver = { workspace = true, features = ["clap"] }
uv-warnings = { workspace = true }
anyhow = { workspace = true }
configparser = { workspace = true }
console = { workspace = true }
ctrlc = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
once_cell = { workspace = true }
pyproject-toml = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
textwrap = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
[lints]
workspace = true

### crates/uv-requirements/src/lib.rs

@@ -0,0 +1,9 @@
pub use crate::named::*;
pub use crate::sources::*;
pub use crate::specification::*;
mod confirm;
mod named;
mod sources;
mod specification;
pub mod upgrade;
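The `lib.rs` above re-exports the `named`, `sources`, and `specification` modules while keeping `confirm` private and exposing `upgrade` as the one public submodule. Downstream call sites in this diff therefore switch from the binary's private module to imports like the following sketch (illustrative only, quoting the import shapes from the `pip_compile.rs` hunk later in this diff):

```rust
// Before: private module inside the `uv` binary.
// use crate::requirements::{
//     read_lockfile, ExtrasSpecification, NamedRequirements, RequirementsSource,
//     RequirementsSpecification,
// };

// After: the new library crate, with `upgrade` as its one public submodule.
use uv_requirements::{
    upgrade::{read_lockfile, Upgrade},
    ExtrasSpecification, NamedRequirements, RequirementsSource, RequirementsSpecification,
};
```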

### crates/uv-requirements/src/named.rs

@@ -0,0 +1,260 @@
use std::path::Path;
use std::str::FromStr;
use anyhow::{Context, Result};
use configparser::ini::Ini;
use futures::{StreamExt, TryStreamExt};
use once_cell::sync::Lazy;
use regex::Regex;
use serde::Deserialize;
use tracing::debug;
use distribution_filename::{SourceDistFilename, WheelFilename};
use distribution_types::RemoteSource;
use pep508_rs::{Requirement, RequirementsTxtRequirement, UnnamedRequirement, VersionOrUrl};
use pypi_types::Metadata10;
use requirements_txt::EditableRequirement;
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_distribution::download_and_extract_archive;
use uv_normalize::PackageName;
/// Like [`RequirementsSpecification`], but with concrete names for all requirements.
#[derive(Debug, Default)]
pub struct NamedRequirements {
/// The requirements for the project.
pub requirements: Vec<Requirement>,
/// The constraints for the project.
pub constraints: Vec<Requirement>,
/// The overrides for the project.
pub overrides: Vec<Requirement>,
/// Packages to install as editable installs.
pub editables: Vec<EditableRequirement>,
}
impl NamedRequirements {
/// Convert a [`RequirementsSpecification`] into a [`NamedRequirements`].
pub async fn from_spec(
requirements: Vec<RequirementsTxtRequirement>,
constraints: Vec<Requirement>,
overrides: Vec<Requirement>,
editables: Vec<EditableRequirement>,
cache: &Cache,
client: &RegistryClient,
) -> Result<Self> {
// Resolve all unnamed references.
let requirements = futures::stream::iter(requirements)
.map(|requirement| async {
match requirement {
RequirementsTxtRequirement::Pep508(requirement) => Ok(requirement),
RequirementsTxtRequirement::Unnamed(requirement) => {
Self::name_requirement(requirement, cache, client).await
}
}
})
.buffer_unordered(50)
.try_collect()
.await?;
Ok(Self {
requirements,
constraints,
overrides,
editables,
})
}
/// Infer the package name for a given "unnamed" requirement.
async fn name_requirement(
requirement: UnnamedRequirement,
cache: &Cache,
client: &RegistryClient,
) -> Result<Requirement> {
// If the requirement is a wheel, extract the package name from the wheel filename.
//
// Ex) `anyio-4.3.0-py3-none-any.whl`
if Path::new(requirement.url.path())
.extension()
.is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
{
let filename = WheelFilename::from_str(&requirement.url.filename()?)?;
return Ok(Requirement {
name: filename.name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
// If the requirement is a source archive, try to extract the package name from the archive
// filename. This isn't guaranteed to work.
//
// Ex) `anyio-4.3.0.tar.gz`
if let Some(filename) = requirement
.url
.filename()
.ok()
.and_then(|filename| SourceDistFilename::parsed_normalized_filename(&filename).ok())
{
return Ok(Requirement {
name: filename.name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
// Download the archive and attempt to infer the package name from the archive contents.
let source = download_and_extract_archive(&requirement.url, cache, client)
.await
.with_context(|| {
format!("Unable to infer package name for the unnamed requirement: {requirement}")
})?;
// Extract the path to the root of the distribution.
let path = source.path();
// Attempt to read a `PKG-INFO` from the directory.
if let Some(metadata) = fs_err::read(path.join("PKG-INFO"))
.ok()
.and_then(|contents| Metadata10::parse_pkg_info(&contents).ok())
{
debug!(
"Found PKG-INFO metadata for {path} ({name})",
path = path.display(),
name = metadata.name
);
return Ok(Requirement {
name: metadata.name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
// Attempt to read a `pyproject.toml` file.
if let Some(pyproject) = fs_err::read_to_string(path.join("pyproject.toml"))
.ok()
.and_then(|contents| toml::from_str::<PyProjectToml>(&contents).ok())
{
// Read PEP 621 metadata from the `pyproject.toml`.
if let Some(project) = pyproject.project {
debug!(
"Found PEP 621 metadata for {path} in `pyproject.toml` ({name})",
path = path.display(),
name = project.name
);
return Ok(Requirement {
name: project.name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
// Read Poetry-specific metadata from the `pyproject.toml`.
if let Some(tool) = pyproject.tool {
if let Some(poetry) = tool.poetry {
if let Some(name) = poetry.name {
debug!(
"Found Poetry metadata for {path} in `pyproject.toml` ({name})",
path = path.display(),
name = name
);
return Ok(Requirement {
name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
}
}
}
// Attempt to read a `setup.cfg` from the directory.
if let Some(setup_cfg) = fs_err::read_to_string(path.join("setup.cfg"))
.ok()
.and_then(|contents| {
let mut ini = Ini::new_cs();
ini.set_multiline(true);
ini.read(contents).ok()
})
{
if let Some(section) = setup_cfg.get("metadata") {
if let Some(Some(name)) = section.get("name") {
if let Ok(name) = PackageName::from_str(name) {
debug!(
"Found setuptools metadata for {path} in `setup.cfg` ({name})",
path = path.display(),
name = name
);
return Ok(Requirement {
name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
}
}
}
// Attempt to read a `setup.py` from the directory.
if let Ok(setup_py) = fs_err::read_to_string(path.join("setup.py")) {
static SETUP_PY_NAME: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"name\s*[=:]\s*['"](?P<name>[^'"]+)['"]"#).unwrap());
if let Some(name) = SETUP_PY_NAME
.captures(&setup_py)
.and_then(|captures| captures.name("name"))
.map(|name| name.as_str())
{
if let Ok(name) = PackageName::from_str(name) {
debug!(
"Found setuptools metadata for {path} in `setup.py` ({name})",
path = path.display(),
name = name
);
return Ok(Requirement {
name,
extras: requirement.extras,
version_or_url: Some(VersionOrUrl::Url(requirement.url)),
marker: requirement.marker,
});
}
}
}
// TODO(charlie): If this is common, consider running the PEP 517 build hooks.
Err(anyhow::anyhow!(
"Unable to infer package name for the unnamed requirement: {requirement}"
))
}
}
/// A `pyproject.toml` as specified in PEP 517 and PEP 621.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct PyProjectToml {
project: Option<Project>,
tool: Option<Tool>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct Project {
name: PackageName,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct Tool {
poetry: Option<ToolPoetry>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct ToolPoetry {
name: Option<PackageName>,
}
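The `setup.py` fallback at the end of `name_requirement` is purely textual: it scans for a `name = "..."` (or `name: "..."`) assignment instead of executing the build script. A self-contained check of that exact pattern (the regex is copied from above; the surrounding driver is illustrative only, not part of this commit):

```rust
use once_cell::sync::Lazy;
use regex::Regex;

// Same pattern as `SETUP_PY_NAME` in `named.rs` above.
static SETUP_PY_NAME: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"name\s*[=:]\s*['"](?P<name>[^'"]+)['"]"#).unwrap());

fn main() {
    let setup_py = r#"
from setuptools import setup
setup(name="anyio", version="4.3.0")
"#;
    // Capture the quoted value of the first `name = "..."` assignment.
    let name = SETUP_PY_NAME
        .captures(setup_py)
        .and_then(|captures| captures.name("name"))
        .map(|name| name.as_str());
    assert_eq!(name, Some("anyio"));
}
```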

### crates/uv-requirements/src/sources.rs

@@ -0,0 +1,86 @@
use std::path::{Path, PathBuf};
use console::Term;
use uv_fs::Simplified;
use uv_normalize::ExtraName;
use crate::confirm;
#[derive(Debug)]
pub enum RequirementsSource {
/// A package was provided on the command line (e.g., `pip install flask`).
Package(String),
/// An editable path was provided on the command line (e.g., `pip install -e ../flask`).
Editable(String),
/// Dependencies were provided via a `requirements.txt` file (e.g., `pip install -r requirements.txt`).
RequirementsTxt(PathBuf),
/// Dependencies were provided via a `pyproject.toml` file (e.g., `pip-compile pyproject.toml`).
PyprojectToml(PathBuf),
}
impl RequirementsSource {
/// Parse a [`RequirementsSource`] from a [`PathBuf`].
pub fn from_path(path: PathBuf) -> Self {
if path.ends_with("pyproject.toml") {
Self::PyprojectToml(path)
} else {
Self::RequirementsTxt(path)
}
}
/// Parse a [`RequirementsSource`] from a user-provided string, assumed to be a package.
///
/// If the user provided a value that appears to be a `requirements.txt` file or a local
/// directory, prompt them to correct it (if the terminal is interactive).
pub fn from_package(name: String) -> Self {
// If the user provided a `requirements.txt` file without `-r` (as in
// `uv pip install requirements.txt`), prompt them to correct it.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
if (name.ends_with(".txt") || name.ends_with(".in")) && Path::new(&name).is_file() {
let term = Term::stderr();
if term.is_term() {
let prompt = format!(
"`{name}` looks like a requirements file but was passed as a package name. Did you mean `-r {name}`?"
);
let confirmation = confirm::confirm(&prompt, &term, true).unwrap();
if confirmation {
return Self::RequirementsTxt(name.into());
}
}
}
Self::Package(name)
}
}
impl std::fmt::Display for RequirementsSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Editable(path) => write!(f, "-e {path}"),
Self::RequirementsTxt(path) | Self::PyprojectToml(path) => {
write!(f, "{}", path.simplified_display())
}
Self::Package(package) => write!(f, "{package}"),
}
}
}
#[derive(Debug, Default, Clone)]
pub enum ExtrasSpecification<'a> {
#[default]
None,
All,
Some(&'a [ExtraName]),
}
impl ExtrasSpecification<'_> {
/// Returns true if a name is included in the extra specification.
pub fn contains(&self, name: &ExtraName) -> bool {
match self {
ExtrasSpecification::All => true,
ExtrasSpecification::None => false,
ExtrasSpecification::Some(extras) => extras.contains(name),
}
}
}
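For intuition about the dispatch above, a hypothetical usage sketch: `from_path` keys off a trailing `pyproject.toml` path component, and `ExtrasSpecification` is a three-way filter over extra names (assumes `anyhow` for error plumbing; not part of the commit):

```rust
use std::path::PathBuf;
use std::str::FromStr;

use uv_normalize::ExtraName;
use uv_requirements::{ExtrasSpecification, RequirementsSource};

fn main() -> anyhow::Result<()> {
    // A trailing `pyproject.toml` component selects the TOML parser...
    let source = RequirementsSource::from_path(PathBuf::from("app/pyproject.toml"));
    assert!(matches!(source, RequirementsSource::PyprojectToml(_)));
    // ...anything else is treated as a requirements file.
    let source = RequirementsSource::from_path(PathBuf::from("requirements-dev.txt"));
    assert!(matches!(source, RequirementsSource::RequirementsTxt(_)));

    // `ExtrasSpecification` decides which optional-dependency groups are collected.
    let dev = ExtraName::from_str("dev")?;
    let extras = [dev.clone()];
    assert!(ExtrasSpecification::Some(&extras).contains(&dev));
    assert!(ExtrasSpecification::All.contains(&dev));
    assert!(!ExtrasSpecification::None.contains(&dev));
    Ok(())
}
```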

### crates/uv-requirements/src/specification.rs

@@ -0,0 +1,368 @@
use std::str::FromStr;
use anyhow::{Context, Result};
use indexmap::IndexMap;
use rustc_hash::FxHashSet;
use tracing::{instrument, Level};
use crate::{ExtrasSpecification, RequirementsSource};
use distribution_types::{FlatIndexLocation, IndexUrl};
use pep508_rs::{Requirement, RequirementsTxtRequirement};
use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
use uv_client::Connectivity;
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use uv_warnings::warn_user;
#[derive(Debug, Default)]
pub struct RequirementsSpecification {
/// The name of the project specifying requirements.
pub project: Option<PackageName>,
/// The requirements for the project.
pub requirements: Vec<RequirementsTxtRequirement>,
/// The constraints for the project.
pub constraints: Vec<Requirement>,
/// The overrides for the project.
pub overrides: Vec<Requirement>,
/// Packages to install as editable installs.
pub editables: Vec<EditableRequirement>,
/// The extras used to collect requirements.
pub extras: FxHashSet<ExtraName>,
/// The index URL to use for fetching packages.
pub index_url: Option<IndexUrl>,
/// The extra index URLs to use for fetching packages.
pub extra_index_urls: Vec<IndexUrl>,
/// Whether to disallow index usage.
pub no_index: bool,
/// The `--find-links` locations to use for fetching packages.
pub find_links: Vec<FlatIndexLocation>,
}
impl RequirementsSpecification {
/// Read the requirements and constraints from a source.
#[instrument(skip_all, level = Level::DEBUG, fields(source = %source))]
pub async fn from_source(
source: &RequirementsSource,
extras: &ExtrasSpecification<'_>,
connectivity: Connectivity,
) -> Result<Self> {
Ok(match source {
RequirementsSource::Package(name) => {
let requirement = RequirementsTxtRequirement::parse(name, std::env::current_dir()?)
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
requirements: vec![requirement],
constraints: vec![],
overrides: vec![],
editables: vec![],
extras: FxHashSet::default(),
index_url: None,
extra_index_urls: vec![],
no_index: false,
find_links: vec![],
}
}
RequirementsSource::Editable(name) => {
let requirement = EditableRequirement::parse(name, std::env::current_dir()?)
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
requirements: vec![],
constraints: vec![],
overrides: vec![],
editables: vec![requirement],
extras: FxHashSet::default(),
index_url: None,
extra_index_urls: vec![],
no_index: false,
find_links: vec![],
}
}
RequirementsSource::RequirementsTxt(path) => {
let requirements_txt =
RequirementsTxt::parse(path, std::env::current_dir()?, connectivity).await?;
Self {
project: None,
requirements: requirements_txt
.requirements
.into_iter()
.map(|entry| entry.requirement)
.collect(),
constraints: requirements_txt.constraints,
editables: requirements_txt.editables,
overrides: vec![],
extras: FxHashSet::default(),
index_url: requirements_txt.index_url.map(IndexUrl::from),
extra_index_urls: requirements_txt
.extra_index_urls
.into_iter()
.map(IndexUrl::from)
.collect(),
no_index: requirements_txt.no_index,
find_links: requirements_txt
.find_links
.into_iter()
.map(|link| match link {
FindLink::Url(url) => FlatIndexLocation::Url(url),
FindLink::Path(path) => FlatIndexLocation::Path(path),
})
.collect(),
}
}
RequirementsSource::PyprojectToml(path) => {
let contents = uv_fs::read_to_string(path).await?;
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
.with_context(|| format!("Failed to parse `{}`", path.user_display()))?;
let mut used_extras = FxHashSet::default();
let mut requirements = Vec::new();
let mut project_name = None;
if let Some(project) = pyproject_toml.project {
// Parse the project name.
let parsed_project_name =
PackageName::new(project.name).with_context(|| {
format!("Invalid `project.name` in {}", path.user_display())
})?;
// Include the default dependencies.
requirements.extend(project.dependencies.unwrap_or_default());
// Include any optional dependencies specified in `extras`.
if !matches!(extras, ExtrasSpecification::None) {
if let Some(optional_dependencies) = project.optional_dependencies {
for (extra_name, optional_requirements) in &optional_dependencies {
// TODO(konstin): It's not ideal that pyproject-toml doesn't use
// `ExtraName`
let normalized_name = ExtraName::from_str(extra_name)?;
if extras.contains(&normalized_name) {
used_extras.insert(normalized_name);
requirements.extend(flatten_extra(
&parsed_project_name,
optional_requirements,
&optional_dependencies,
)?);
}
}
}
}
project_name = Some(parsed_project_name);
}
if requirements.is_empty()
&& pyproject_toml.build_system.is_some_and(|build_system| {
build_system.requires.iter().any(|requirement| {
requirement.name.as_dist_info_name().starts_with("poetry")
})
})
{
warn_user!("`{}` does not contain any dependencies (hint: specify dependencies in the `project.dependencies` section; `tool.poetry.dependencies` is not currently supported)", path.user_display());
}
Self {
project: project_name,
requirements: requirements
.into_iter()
.map(RequirementsTxtRequirement::Pep508)
.collect(),
constraints: vec![],
overrides: vec![],
editables: vec![],
extras: used_extras,
index_url: None,
extra_index_urls: vec![],
no_index: false,
find_links: vec![],
}
}
})
}
/// Read the combined requirements and constraints from a set of sources.
pub async fn from_sources(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
connectivity: Connectivity,
) -> Result<Self> {
let mut spec = Self::default();
// Read all requirements, and keep track of all requirements _and_ constraints.
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
// a requirements file can also add constraints.
for source in requirements {
let source = Self::from_source(source, extras, connectivity).await?;
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
spec.extras.extend(source.extras);
spec.editables.extend(source.editables);
// Use the first project name discovered.
if spec.project.is_none() {
spec.project = source.project;
}
if let Some(url) = source.index_url {
if let Some(existing) = spec.index_url {
return Err(anyhow::anyhow!(
"Multiple index URLs specified: `{existing}` vs.` {url}",
));
}
spec.index_url = Some(url);
}
spec.no_index |= source.no_index;
spec.extra_index_urls.extend(source.extra_index_urls);
spec.find_links.extend(source.find_links);
}
// Read all constraints, treating _everything_ as a constraint.
for source in constraints {
let source = Self::from_source(source, extras, connectivity).await?;
for requirement in source.requirements {
match requirement {
RequirementsTxtRequirement::Pep508(requirement) => {
spec.constraints.push(requirement);
}
RequirementsTxtRequirement::Unnamed(requirement) => {
return Err(anyhow::anyhow!(
"Unnamed requirements are not allowed as constraints (found: `{requirement}`)"
));
}
}
}
spec.constraints.extend(source.constraints);
spec.constraints.extend(source.overrides);
if let Some(url) = source.index_url {
if let Some(existing) = spec.index_url {
return Err(anyhow::anyhow!(
"Multiple index URLs specified: `{existing}` vs.` {url}",
));
}
spec.index_url = Some(url);
}
spec.no_index |= source.no_index;
spec.extra_index_urls.extend(source.extra_index_urls);
spec.find_links.extend(source.find_links);
}
// Read all overrides, treating both requirements _and_ constraints as overrides.
for source in overrides {
let source = Self::from_source(source, extras, connectivity).await?;
for requirement in source.requirements {
match requirement {
RequirementsTxtRequirement::Pep508(requirement) => {
spec.overrides.push(requirement);
}
RequirementsTxtRequirement::Unnamed(requirement) => {
return Err(anyhow::anyhow!(
"Unnamed requirements are not allowed as overrides (found: `{requirement}`)"
));
}
}
}
spec.overrides.extend(source.constraints);
spec.overrides.extend(source.overrides);
if let Some(url) = source.index_url {
if let Some(existing) = spec.index_url {
return Err(anyhow::anyhow!(
"Multiple index URLs specified: `{existing}` vs.` {url}",
));
}
spec.index_url = Some(url);
}
spec.no_index |= source.no_index;
spec.extra_index_urls.extend(source.extra_index_urls);
spec.find_links.extend(source.find_links);
}
Ok(spec)
}
/// Read the requirements from a set of sources.
pub async fn from_simple_sources(
requirements: &[RequirementsSource],
connectivity: Connectivity,
) -> Result<Self> {
Self::from_sources(
requirements,
&[],
&[],
&ExtrasSpecification::None,
connectivity,
)
.await
}
}
/// Given an extra in a project that may contain references to the project
/// itself, flatten it into a list of requirements.
///
/// For example:
/// ```toml
/// [project]
/// name = "my-project"
/// version = "0.0.1"
/// dependencies = [
/// "tomli",
/// ]
///
/// [project.optional-dependencies]
/// test = [
/// "pep517",
/// ]
/// dev = [
/// "my-project[test]",
/// ]
/// ```
fn flatten_extra(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<String, Vec<Requirement>>,
) -> Result<Vec<Requirement>> {
fn inner(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<String, Vec<Requirement>>,
seen: &mut FxHashSet<ExtraName>,
) -> Result<Vec<Requirement>> {
let mut flattened = Vec::with_capacity(requirements.len());
for requirement in requirements {
if requirement.name == *project_name {
for extra in &requirement.extras {
// Avoid infinite recursion on mutually recursive extras.
if !seen.insert(extra.clone()) {
continue;
}
// Flatten the extra requirements.
for (name, extra_requirements) in extras {
let normalized_name = ExtraName::from_str(name)?;
if normalized_name == *extra {
flattened.extend(inner(
project_name,
extra_requirements,
extras,
seen,
)?);
}
}
}
} else {
flattened.push(requirement.clone());
}
}
Ok(flattened)
}
inner(
project_name,
requirements,
extras,
&mut FxHashSet::default(),
)
}
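To make the recursion concrete: given the `pyproject.toml` from the doc comment, requesting the `dev` extra expands the self-reference `my-project[test]` into `pep517`, while the `seen` set guards against mutually recursive extras. A simplified, self-contained sketch of the same traversal (plain strings stand in for `Requirement` and `ExtraName`; illustrative only, not part of the commit):

```rust
use std::collections::{HashMap, HashSet};

// Simplified analogue of `flatten_extra`: replace self-references like
// `my-project[test]` with that extra's requirements, recursively.
fn flatten(
    project: &str,
    reqs: &[String],
    extras: &HashMap<String, Vec<String>>,
    seen: &mut HashSet<String>,
) -> Vec<String> {
    let mut out = Vec::new();
    for req in reqs {
        // A self-reference pulls in the referenced extra's requirements.
        if let Some(extra) = req
            .strip_prefix(project)
            .and_then(|rest| rest.strip_prefix('['))
            .and_then(|rest| rest.strip_suffix(']'))
        {
            // Avoid infinite recursion on mutually recursive extras.
            if seen.insert(extra.to_string()) {
                if let Some(inner) = extras.get(extra) {
                    out.extend(flatten(project, inner, extras, seen));
                }
            }
        } else {
            out.push(req.clone());
        }
    }
    out
}

fn main() {
    let extras = HashMap::from([
        ("test".to_string(), vec!["pep517".to_string()]),
        ("dev".to_string(), vec!["my-project[test]".to_string()]),
    ]);
    let dev = extras["dev"].clone();
    let flat = flatten("my-project", &dev, &extras, &mut HashSet::new());
    assert_eq!(flat, vec!["pep517".to_string()]); // `dev` resolves to `pep517`
}
```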

### crates/uv-requirements/src/upgrade.rs

@@ -0,0 +1,83 @@
use std::path::Path;
use anyhow::Result;
use rustc_hash::FxHashSet;
use requirements_txt::RequirementsTxt;
use uv_client::Connectivity;
use uv_normalize::PackageName;
use uv_resolver::{Preference, PreferenceError};
/// Whether to allow package upgrades.
#[derive(Debug)]
pub enum Upgrade {
/// Prefer pinned versions from the existing lockfile, if possible.
None,
/// Allow package upgrades for all packages, ignoring the existing lockfile.
All,
/// Allow package upgrades, but only for the specified packages.
Packages(FxHashSet<PackageName>),
}
impl Upgrade {
/// Determine the upgrade strategy from the command-line arguments.
pub fn from_args(upgrade: bool, upgrade_package: Vec<PackageName>) -> Self {
if upgrade {
Self::All
} else if !upgrade_package.is_empty() {
Self::Packages(upgrade_package.into_iter().collect())
} else {
Self::None
}
}
/// Returns `true` if no packages should be upgraded.
pub fn is_none(&self) -> bool {
matches!(self, Self::None)
}
/// Returns `true` if all packages should be upgraded.
pub fn is_all(&self) -> bool {
matches!(self, Self::All)
}
}
/// Load the preferred requirements from an existing lockfile, applying the upgrade strategy.
pub async fn read_lockfile(
output_file: Option<&Path>,
upgrade: Upgrade,
) -> Result<Vec<Preference>> {
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
let Some(output_file) = output_file
.filter(|_| !upgrade.is_all())
.filter(|output_file| output_file.exists())
else {
return Ok(Vec::new());
};
// Parse the requirements from the lockfile.
let requirements_txt =
RequirementsTxt::parse(output_file, std::env::current_dir()?, Connectivity::Offline)
.await?;
let preferences = requirements_txt
.requirements
.into_iter()
.filter(|entry| !entry.editable)
.map(Preference::from_entry)
.collect::<Result<Vec<_>, PreferenceError>>()?;
// Apply the upgrade strategy to the requirements.
Ok(match upgrade {
// Respect all pinned versions from the existing lockfile.
Upgrade::None => preferences,
// Ignore all pinned versions from the existing lockfile.
Upgrade::All => vec![],
// Ignore pinned versions for the specified packages.
Upgrade::Packages(packages) => preferences
.into_iter()
.filter(|preference| !packages.contains(preference.name()))
.collect(),
})
}
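The resulting CLI semantics: `--upgrade` wins outright, `--upgrade-package` drops only the named pins from the preferences, and no flag keeps every pin from the existing lockfile. A hypothetical usage sketch (assumes `anyhow` for error plumbing; not part of the commit):

```rust
use std::str::FromStr;

use uv_normalize::PackageName;
use uv_requirements::upgrade::Upgrade;

fn main() -> anyhow::Result<()> {
    // `--upgrade` ignores the lockfile entirely.
    assert!(Upgrade::from_args(true, vec![]).is_all());
    // No upgrade flags: prefer every pinned version.
    assert!(Upgrade::from_args(false, vec![]).is_none());
    // `--upgrade-package flask`: only flask's pin is discarded.
    let upgrade = Upgrade::from_args(false, vec![PackageName::from_str("flask")?]);
    assert!(!upgrade.is_none() && !upgrade.is_all());
    Ok(())
}
```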

### crates/uv/Cargo.toml

@@ -14,7 +14,6 @@ default-run = "uv"
workspace = true
[dependencies]
distribution-filename = { workspace = true }
distribution-types = { workspace = true }
install-wheel-rs = { workspace = true, features = ["clap"], default-features = false }
pep508_rs = { workspace = true }
@@ -25,12 +24,12 @@ uv-auth = { workspace = true, features = ["clap"] }
uv-cache = { workspace = true, features = ["clap"] }
uv-client = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-fs = { workspace = true }
uv-installer = { workspace = true }
uv-interpreter = { workspace = true }
uv-normalize = { workspace = true }
uv-resolver = { workspace = true, features = ["clap"] }
uv-requirements = { workspace = true }
uv-traits = { workspace = true }
uv-virtualenv = { workspace = true }
uv-warnings = { workspace = true }
@@ -38,16 +37,11 @@ uv-warnings = { workspace = true }
anstream = { workspace = true }
anyhow = { workspace = true }
axoupdater = { workspace = true, features = ["github_releases", "tokio"] }
base64 = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"] }
clap_complete_command = { workspace = true }
configparser = { workspace = true }
console = { workspace = true }
ctrlc = { workspace = true }
flate2 = { workspace = true, default-features = false }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
indexmap = { workspace = true }
indicatif = { workspace = true }
itertools = { workspace = true }
@@ -80,6 +74,7 @@ tikv-jemallocator = { version = "0.5.4" }
[dev-dependencies]
assert_cmd = { version = "2.0.14" }
assert_fs = { version = "1.1.0" }
base64 = { version = "0.21.7" }
byteorder = { version = "1.5.0" }
filetime = { version = "0.2.23" }
indoc = { version = "2.0.4" }

### crates/uv/src/commands/mod.rs

@@ -8,7 +8,7 @@ pub(crate) use cache_clean::cache_clean;
pub(crate) use cache_dir::cache_dir;
use distribution_types::InstalledMetadata;
pub(crate) use pip_check::pip_check;
pub(crate) use pip_compile::{extra_name_with_clap_error, pip_compile, Upgrade};
pub(crate) use pip_compile::{extra_name_with_clap_error, pip_compile};
pub(crate) use pip_freeze::pip_freeze;
pub(crate) use pip_install::pip_install;
pub(crate) use pip_list::pip_list;

### crates/uv/src/commands/pip_compile.rs

@@ -11,7 +11,6 @@ use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use owo_colors::OwoColorize;
use rustc_hash::FxHashSet;
use tempfile::tempdir_in;
use tracing::debug;
@@ -26,6 +25,10 @@ use uv_fs::Simplified;
use uv_installer::{Downloader, NoBinary};
use uv_interpreter::{find_best_python, PythonEnvironment, PythonVersion};
use uv_normalize::{ExtraName, PackageName};
use uv_requirements::{
upgrade::{read_lockfile, Upgrade},
ExtrasSpecification, NamedRequirements, RequirementsSource, RequirementsSpecification,
};
use uv_resolver::{
AnnotationStyle, DependencyMode, DisplayResolutionGraph, InMemoryIndex, Manifest,
OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver,
@@ -36,10 +39,6 @@ use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, ResolverReporter};
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{
read_lockfile, ExtrasSpecification, NamedRequirements, RequirementsSource,
RequirementsSpecification,
};
/// Resolve a set of requirements into a set of pinned versions.
#[allow(clippy::too_many_arguments, clippy::fn_params_excessive_bools)]
@@ -577,42 +576,6 @@ impl OutputWriter {
}
}
/// Whether to allow package upgrades.
#[derive(Debug)]
pub(crate) enum Upgrade {
/// Prefer pinned versions from the existing lockfile, if possible.
None,
/// Allow package upgrades for all packages, ignoring the existing lockfile.
All,
/// Allow package upgrades, but only for the specified packages.
Packages(FxHashSet<PackageName>),
}
impl Upgrade {
/// Determine the upgrade strategy from the command-line arguments.
pub(crate) fn from_args(upgrade: bool, upgrade_package: Vec<PackageName>) -> Self {
if upgrade {
Self::All
} else if !upgrade_package.is_empty() {
Self::Packages(upgrade_package.into_iter().collect())
} else {
Self::None
}
}
/// Returns `true` if no packages should be upgraded.
pub(crate) fn is_none(&self) -> bool {
matches!(self, Self::None)
}
/// Returns `true` if all packages should be upgraded.
pub(crate) fn is_all(&self) -> bool {
matches!(self, Self::All)
}
}
pub(crate) fn extra_name_with_clap_error(arg: &str) -> Result<ExtraName> {
ExtraName::from_str(arg).map_err(|_err| {
anyhow!(

### crates/uv/src/commands/pip_install.rs

@@ -30,6 +30,10 @@ use uv_installer::{
};
use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_normalize::PackageName;
use uv_requirements::{
upgrade::Upgrade, ExtrasSpecification, NamedRequirements, RequirementsSource,
RequirementsSpecification,
};
use uv_resolver::{
DependencyMode, InMemoryIndex, Manifest, Options, OptionsBuilder, PreReleaseMode, Preference,
ResolutionGraph, ResolutionMode, Resolver,
@@ -40,11 +44,8 @@ use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{
ExtrasSpecification, NamedRequirements, RequirementsSource, RequirementsSpecification,
};
use super::{DryRunEvent, Upgrade};
use super::DryRunEvent;
/// Install packages into the current environment.
#[allow(clippy::too_many_arguments, clippy::fn_params_excessive_bools)]

### crates/uv/src/commands/pip_sync.rs

@@ -26,7 +26,7 @@ use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, FinderReporter, InstallReporter};
use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{NamedRequirements, RequirementsSource, RequirementsSpecification};
use uv_requirements::{NamedRequirements, RequirementsSource, RequirementsSpecification};
/// Install a set of locked requirements into the current Python environment.
#[allow(clippy::too_many_arguments, clippy::fn_params_excessive_bools)]

### crates/uv/src/commands/pip_uninstall.rs

@@ -14,7 +14,7 @@ use uv_interpreter::PythonEnvironment;
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{RequirementsSource, RequirementsSpecification};
use uv_requirements::{RequirementsSource, RequirementsSpecification};
/// Uninstall packages from the current environment.
pub(crate) async fn pip_uninstall(

### crates/uv/src/main.rs

@@ -13,21 +13,20 @@ use owo_colors::OwoColorize;
use tracing::instrument;
use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl};
use requirements::ExtrasSpecification;
use uv_auth::KeyringProvider;
use uv_cache::{Cache, CacheArgs, Refresh};
use uv_client::Connectivity;
use uv_installer::{NoBinary, Reinstall};
use uv_interpreter::PythonVersion;
use uv_normalize::{ExtraName, PackageName};
use uv_requirements::{upgrade::Upgrade, ExtrasSpecification, RequirementsSource};
use uv_resolver::{AnnotationStyle, DependencyMode, PreReleaseMode, ResolutionMode};
use uv_traits::{
ConfigSettingEntry, ConfigSettings, NoBuild, PackageNameSpecifier, SetupPyStrategy,
};
use crate::commands::{extra_name_with_clap_error, ExitStatus, ListFormat, Upgrade, VersionFormat};
use crate::commands::{extra_name_with_clap_error, ExitStatus, ListFormat, VersionFormat};
use crate::compat::CompatArgs;
use crate::requirements::RequirementsSource;
#[cfg(target_os = "windows")]
#[global_allocator]
@@ -47,10 +46,8 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
mod commands;
mod compat;
mod confirm;
mod logging;
mod printer;
mod requirements;
mod shell;
mod version;