mirror of https://github.com/astral-sh/uv
Respect dynamic extras in `uv lock` and `uv sync` (#8091)
## Summary

We can't rely on reading these dynamic extras from the `pyproject.toml`; instead, we resolve the project metadata (which will typically just require reading the `pyproject.toml`, but otherwise goes through our standard metadata paths).

Closes https://github.com/astral-sh/uv/issues/8071.
Parent: 7b80b18166
Commit: dc3f628de1
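For context, "dynamic extras" are optional dependencies that the build backend computes at build time, so they never appear statically under `[project.optional-dependencies]`. The test added at the bottom of this diff uses a setuptools project of exactly this shape:

```toml
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig"]
dynamic = ["optional-dependencies"]

# The `dev` extra only materializes once setuptools reads this file at build time.
[tool.setuptools.dynamic.optional-dependencies]
dev = { file = "requirements-dev.txt" }

[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
```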
```diff
@@ -0,0 +1,150 @@
+use std::sync::Arc;
+
+use futures::{stream::FuturesOrdered, TryStreamExt};
+use thiserror::Error;
+
+use uv_distribution::{DistributionDatabase, Reporter};
+use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, SourceDist};
+use uv_pypi_types::Requirement;
+use uv_resolver::{InMemoryIndex, MetadataResponse};
+use uv_types::{BuildContext, HashStrategy};
+
+use crate::required_dist;
+
+#[derive(Debug, Error)]
+pub enum ExtrasError {
+    #[error("Failed to download: `{0}`")]
+    Download(BuiltDist, #[source] uv_distribution::Error),
+    #[error("Failed to download and build: `{0}`")]
+    DownloadAndBuild(SourceDist, #[source] uv_distribution::Error),
+    #[error("Failed to build: `{0}`")]
+    Build(SourceDist, #[source] uv_distribution::Error),
+    #[error(transparent)]
+    UnsupportedUrl(#[from] uv_distribution_types::Error),
+}
+
+/// A resolver to expand the requested extras for a set of requirements to include all defined
+/// extras.
+pub struct ExtrasResolver<'a, Context: BuildContext> {
+    /// Whether to check hashes for distributions.
+    hasher: &'a HashStrategy,
+    /// The in-memory index for resolving dependencies.
+    index: &'a InMemoryIndex,
+    /// The database for fetching and building distributions.
+    database: DistributionDatabase<'a, Context>,
+}
+
+impl<'a, Context: BuildContext> ExtrasResolver<'a, Context> {
+    /// Instantiate a new [`ExtrasResolver`] for a given set of requirements.
+    pub fn new(
+        hasher: &'a HashStrategy,
+        index: &'a InMemoryIndex,
+        database: DistributionDatabase<'a, Context>,
+    ) -> Self {
+        Self {
+            hasher,
+            index,
+            database,
+        }
+    }
+
+    /// Set the [`Reporter`] to use for this resolver.
+    #[must_use]
+    pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
+        Self {
+            database: self.database.with_reporter(reporter),
+            ..self
+        }
+    }
+
+    /// Expand the set of available extras for a given set of requirements.
+    pub async fn resolve(
+        self,
+        requirements: impl Iterator<Item = Requirement>,
+    ) -> Result<Vec<Requirement>, ExtrasError> {
+        let Self {
+            hasher,
+            index,
+            database,
+        } = self;
+        requirements
+            .map(|requirement| async {
+                Self::resolve_requirement(requirement, hasher, index, &database)
+                    .await
+                    .map(Requirement::from)
+            })
+            .collect::<FuturesOrdered<_>>()
+            .try_collect()
+            .await
+    }
+
+    /// Expand the set of available extras for a given [`Requirement`].
+    async fn resolve_requirement(
+        requirement: Requirement,
+        hasher: &HashStrategy,
+        index: &InMemoryIndex,
+        database: &DistributionDatabase<'a, Context>,
+    ) -> Result<Requirement, ExtrasError> {
+        // Determine whether the requirement represents a local distribution and convert to a
+        // buildable distribution.
+        let Some(dist) = required_dist(&requirement)? else {
+            return Ok(requirement);
+        };
+
+        // Fetch the metadata for the distribution.
+        let metadata = {
+            let id = dist.version_id();
+            if let Some(archive) = index
+                .distributions()
+                .get(&id)
+                .as_deref()
+                .and_then(|response| {
+                    if let MetadataResponse::Found(archive, ..) = response {
+                        Some(archive)
+                    } else {
+                        None
+                    }
+                })
+            {
+                // If the metadata is already in the index, return it.
+                archive.metadata.clone()
+            } else {
+                // Run the PEP 517 build process to extract metadata from the source distribution.
+                let archive = database
+                    .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
+                    .await
+                    .map_err(|err| match &dist {
+                        Dist::Built(built) => ExtrasError::Download(built.clone(), err),
+                        Dist::Source(source) => {
+                            if source.is_local() {
+                                ExtrasError::Build(source.clone(), err)
+                            } else {
+                                ExtrasError::DownloadAndBuild(source.clone(), err)
+                            }
+                        }
+                    })?;
+
+                let metadata = archive.metadata.clone();
+
+                // Insert the metadata into the index.
+                index
+                    .distributions()
+                    .done(id, Arc::new(MetadataResponse::Found(archive)));
+
+                metadata
+            }
+        };
+
+        // Sort extras for consistency.
+        let extras = {
+            let mut extras = metadata.provides_extras;
+            extras.sort_unstable();
+            extras
+        };
+
+        Ok(Requirement {
+            extras,
+            ..requirement
+        })
+    }
+}
```
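The new resolver is consumed in `do_lock` (see the hunk further down). A condensed sketch of that call shape, assuming `hasher`, `state`, `database`, `printer`, and `workspace` are in scope as they are there:

```rust
// Expand each member requirement's extras from its *resolved* metadata
// (which may run a PEP 517 build), not from static `pyproject.toml` keys.
let expanded = ExtrasResolver::new(&hasher, &state.index, database)
    .with_reporter(ResolverReporter::from(printer))
    .resolve(workspace.members_requirements())
    .await?;
```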
```diff
@@ -1,12 +1,75 @@
+use uv_distribution_types::{Dist, GitSourceDist, SourceDist};
+use uv_git::GitUrl;
+use uv_pypi_types::{Requirement, RequirementSource};
+
+pub use crate::extras::*;
 pub use crate::lookahead::*;
 pub use crate::source_tree::*;
 pub use crate::sources::*;
 pub use crate::specification::*;
 pub use crate::unnamed::*;
 
+mod extras;
 mod lookahead;
 mod source_tree;
 mod sources;
 mod specification;
 mod unnamed;
 pub mod upgrade;
+
+/// Convert a [`Requirement`] into a [`Dist`], if it is a direct URL.
+pub(crate) fn required_dist(
+    requirement: &Requirement,
+) -> Result<Option<Dist>, uv_distribution_types::Error> {
+    Ok(Some(match &requirement.source {
+        RequirementSource::Registry { .. } => return Ok(None),
+        RequirementSource::Url {
+            subdirectory,
+            location,
+            ext,
+            url,
+        } => Dist::from_http_url(
+            requirement.name.clone(),
+            url.clone(),
+            location.clone(),
+            subdirectory.clone(),
+            *ext,
+        )?,
+        RequirementSource::Git {
+            repository,
+            reference,
+            precise,
+            subdirectory,
+            url,
+        } => {
+            let git_url = if let Some(precise) = precise {
+                GitUrl::from_commit(repository.clone(), reference.clone(), *precise)
+            } else {
+                GitUrl::from_reference(repository.clone(), reference.clone())
+            };
+            Dist::Source(SourceDist::Git(GitSourceDist {
+                name: requirement.name.clone(),
+                git: Box::new(git_url),
+                subdirectory: subdirectory.clone(),
+                url: url.clone(),
+            }))
+        }
+        RequirementSource::Path {
+            install_path,
+            ext,
+            url,
+        } => Dist::from_file_url(requirement.name.clone(), url.clone(), install_path, *ext)?,
+        RequirementSource::Directory {
+            install_path,
+            r#virtual,
+            url,
+            editable,
+        } => Dist::from_directory_url(
+            requirement.name.clone(),
+            url.clone(),
+            install_path,
+            *editable,
+            *r#virtual,
+        )?,
+    }))
+}
```
```diff
@@ -6,10 +6,10 @@ use rustc_hash::FxHashSet;
 use thiserror::Error;
 use tracing::trace;
 
+use crate::required_dist;
 use uv_configuration::{Constraints, Overrides};
 use uv_distribution::{DistributionDatabase, Reporter};
-use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, GitSourceDist, SourceDist};
-use uv_git::GitUrl;
+use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, SourceDist};
 use uv_normalize::GroupName;
 use uv_pypi_types::{Requirement, RequirementSource};
 use uv_resolver::{InMemoryIndex, MetadataResponse, ResolverMarkers};
```
```diff
@@ -245,58 +245,3 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
         )))
     }
 }
-
-/// Convert a [`Requirement`] into a [`Dist`], if it is a direct URL.
-fn required_dist(requirement: &Requirement) -> Result<Option<Dist>, uv_distribution_types::Error> {
-    Ok(Some(match &requirement.source {
-        RequirementSource::Registry { .. } => return Ok(None),
-        RequirementSource::Url {
-            subdirectory,
-            location,
-            ext,
-            url,
-        } => Dist::from_http_url(
-            requirement.name.clone(),
-            url.clone(),
-            location.clone(),
-            subdirectory.clone(),
-            *ext,
-        )?,
-        RequirementSource::Git {
-            repository,
-            reference,
-            precise,
-            subdirectory,
-            url,
-        } => {
-            let git_url = if let Some(precise) = precise {
-                GitUrl::from_commit(repository.clone(), reference.clone(), *precise)
-            } else {
-                GitUrl::from_reference(repository.clone(), reference.clone())
-            };
-            Dist::Source(SourceDist::Git(GitSourceDist {
-                name: requirement.name.clone(),
-                git: Box::new(git_url),
-                subdirectory: subdirectory.clone(),
-                url: url.clone(),
-            }))
-        }
-        RequirementSource::Path {
-            install_path,
-            ext,
-            url,
-        } => Dist::from_file_url(requirement.name.clone(), url.clone(), install_path, *ext)?,
-        RequirementSource::Directory {
-            install_path,
-            r#virtual,
-            url,
-            editable,
-        } => Dist::from_directory_url(
-            requirement.name.clone(),
-            url.clone(),
-            install_path,
-            *editable,
-            *r#virtual,
-        )?,
-    }))
-}
```
```diff
@@ -1,5 +1,5 @@
 use std::borrow::Cow;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::sync::Arc;
 
 use anyhow::{Context, Result};
```
```diff
@@ -34,8 +34,6 @@ pub struct SourceTreeResolution {
 /// Used, e.g., to determine the input requirements when a user specifies a `pyproject.toml`
 /// file, which may require running PEP 517 build hooks to extract metadata.
 pub struct SourceTreeResolver<'a, Context: BuildContext> {
-    /// The requirements for the project.
-    source_trees: Vec<PathBuf>,
     /// The extras to include when resolving requirements.
     extras: &'a ExtrasSpecification,
     /// The hash policy to enforce.
```
```diff
@@ -49,14 +47,12 @@ pub struct SourceTreeResolver<'a, Context: BuildContext> {
 impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
     /// Instantiate a new [`SourceTreeResolver`] for a given set of `source_trees`.
     pub fn new(
-        source_trees: Vec<PathBuf>,
         extras: &'a ExtrasSpecification,
         hasher: &'a HashStrategy,
         index: &'a InMemoryIndex,
         database: DistributionDatabase<'a, Context>,
     ) -> Self {
         Self {
-            source_trees,
             extras,
             hasher,
             index,
```
```diff
@@ -74,10 +70,11 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
     }
 
     /// Resolve the requirements from the provided source trees.
-    pub async fn resolve(self) -> Result<Vec<SourceTreeResolution>> {
-        let resolutions: Vec<_> = self
-            .source_trees
-            .iter()
+    pub async fn resolve(
+        self,
+        source_trees: impl Iterator<Item = &Path>,
+    ) -> Result<Vec<SourceTreeResolution>> {
+        let resolutions: Vec<_> = source_trees
             .map(|source_tree| async { self.resolve_source_tree(source_tree).await })
             .collect::<FuturesOrdered<_>>()
             .try_collect()
```
```diff
@@ -36,8 +36,6 @@ pub enum NamedRequirementsError {
 
 /// Like [`RequirementsSpecification`], but with concrete names for all requirements.
 pub struct NamedRequirementsResolver<'a, Context: BuildContext> {
-    /// The requirements for the project.
-    requirements: Vec<UnnamedRequirement<VerbatimParsedUrl>>,
     /// Whether to check hashes for distributions.
     hasher: &'a HashStrategy,
     /// The in-memory index for resolving dependencies.
```
```diff
@@ -47,15 +45,13 @@ pub struct NamedRequirementsResolver<'a, Context: BuildContext> {
 }
 
 impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
-    /// Instantiate a new [`NamedRequirementsResolver`] for a given set of requirements.
+    /// Instantiate a new [`NamedRequirementsResolver`].
     pub fn new(
-        requirements: Vec<UnnamedRequirement<VerbatimParsedUrl>>,
         hasher: &'a HashStrategy,
         index: &'a InMemoryIndex,
         database: DistributionDatabase<'a, Context>,
     ) -> Self {
         Self {
-            requirements,
             hasher,
             index,
             database,
```
```diff
@@ -72,15 +68,16 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
     }
 
     /// Resolve any unnamed requirements in the specification.
-    pub async fn resolve(self) -> Result<Vec<Requirement>, NamedRequirementsError> {
+    pub async fn resolve(
+        self,
+        requirements: impl Iterator<Item = UnnamedRequirement<VerbatimParsedUrl>>,
+    ) -> Result<Vec<Requirement>, NamedRequirementsError> {
         let Self {
-            requirements,
             hasher,
             index,
             database,
         } = self;
         requirements
-            .into_iter()
             .map(|requirement| async {
                 Self::resolve_requirement(requirement, hasher, index, &database)
                     .await
```
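`SourceTreeResolver` and `NamedRequirementsResolver` get the same treatment: the inputs move out of the constructor and into `resolve`, which now accepts any iterator, so callers can pass borrowed or owned data without handing ownership to the resolver up front. A sketch of the updated call shape, mirroring the call sites later in this diff (`unnamed`, `hasher`, `index`, `database`, and `printer` come from the surrounding function):

```rust
// Inputs are supplied at resolve time rather than at construction.
NamedRequirementsResolver::new(hasher, index, database)
    .with_reporter(ResolverReporter::from(printer))
    .resolve(unnamed.into_iter())
    .await?
```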
```diff
@@ -266,23 +266,12 @@ impl Workspace {
     /// Returns the set of requirements that include all packages in the workspace.
     pub fn members_requirements(&self) -> impl Iterator<Item = Requirement> + '_ {
         self.packages.values().filter_map(|member| {
-            let project = member.pyproject_toml.project.as_ref()?;
-            // Extract the extras available in the project.
-            let extras = project
-                .optional_dependencies
-                .as_ref()
-                .map(|optional_dependencies| {
-                    // It's a `BTreeMap` so the keys are sorted.
-                    optional_dependencies.keys().cloned().collect::<Vec<_>>()
-                })
-                .unwrap_or_default();
-
             let url = VerbatimUrl::from_absolute_path(&member.root)
                 .expect("path is valid URL")
                 .with_given(member.root.to_string_lossy());
             Some(Requirement {
-                name: project.name.clone(),
-                extras,
+                name: member.pyproject_toml.project.as_ref()?.name.clone(),
+                extras: vec![],
                 marker: MarkerTree::TRUE,
                 source: if member.pyproject_toml.is_package() {
                     RequirementSource::Directory {
```
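The statically derived extras are intentionally dropped here: `members_requirements` now emits every member with `extras: vec![]`, and the `ExtrasResolver` wired into `do_lock` (below) repopulates them from resolved metadata, so extras declared as `dynamic` are included too. Schematically (fields abbreviated; an illustration rather than constructible code):

```rust
// As emitted by `members_requirements` (static pass):
//   Requirement { name: "project", extras: [], source: Directory { .. }, .. }
// After `ExtrasResolver::resolve` (metadata pass), sorted for determinism:
//   Requirement { name: "project", extras: ["dev"], source: Directory { .. }, .. }
```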
```diff
@@ -134,13 +134,12 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
     if !unnamed.is_empty() {
         requirements.extend(
             NamedRequirementsResolver::new(
-                unnamed,
                 hasher,
                 index,
                 DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
             )
             .with_reporter(ResolverReporter::from(printer))
-            .resolve()
+            .resolve(unnamed.into_iter())
             .await?,
         );
     }
```
```diff
@@ -148,14 +147,13 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
     // Resolve any source trees into requirements.
     if !source_trees.is_empty() {
         let resolutions = SourceTreeResolver::new(
-            source_trees,
             extras,
             hasher,
             index,
             DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
         )
         .with_reporter(ResolverReporter::from(printer))
-        .resolve()
+        .resolve(source_trees.iter().map(PathBuf::as_path))
         .await?;
 
         // If we resolved a single project, use it for the project name.
```
```diff
@@ -219,13 +217,12 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
     if !unnamed.is_empty() {
         overrides.extend(
             NamedRequirementsResolver::new(
-                unnamed,
                 hasher,
                 index,
                 DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
             )
             .with_reporter(ResolverReporter::from(printer))
-            .resolve()
+            .resolve(unnamed.into_iter())
             .await?,
         );
     }
```
```diff
@@ -326,13 +326,12 @@ pub(crate) async fn add(
     if !unnamed.is_empty() {
         requirements.extend(
             NamedRequirementsResolver::new(
-                unnamed,
                 &hasher,
                 &state.index,
                 DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
             )
             .with_reporter(ResolverReporter::from(printer))
-            .resolve()
+            .resolve(unnamed.into_iter())
             .await?,
         );
     }
```
```diff
@@ -26,6 +26,7 @@ use uv_pep440::Version;
 use uv_pypi_types::{Requirement, SupportedEnvironments};
 use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreference, PythonRequest};
 use uv_requirements::upgrade::{read_lock_requirements, LockedRequirements};
+use uv_requirements::ExtrasResolver;
 use uv_resolver::{
     FlatIndex, Lock, Options, OptionsBuilder, PythonRequirement, RequiresPython, ResolverManifest,
     ResolverMarkers, SatisfiesResult,
```
```diff
@@ -38,6 +39,7 @@ use crate::commands::pip::loggers::{DefaultResolveLogger, ResolveLogger, Summary
 use crate::commands::project::{
     find_requires_python, ProjectError, ProjectInterpreter, SharedState,
 };
+use crate::commands::reporters::ResolverReporter;
 use crate::commands::{diagnostics, pip, ExitStatus};
 use crate::printer::Printer;
 use crate::settings::{ResolverSettings, ResolverSettingsRef};
```
```diff
@@ -534,8 +536,11 @@ async fn do_lock(
 
     // Resolve the requirements.
     let resolution = pip::operations::resolve(
-        workspace
-            .members_requirements()
+        ExtrasResolver::new(&hasher, &state.index, database)
+            .with_reporter(ResolverReporter::from(printer))
+            .resolve(workspace.members_requirements())
+            .await?
+            .into_iter()
             .chain(requirements.iter().cloned())
             .map(UnresolvedRequirementSpecification::from)
             .collect(),
```
```diff
@@ -27,9 +27,7 @@ use uv_python::{
     VersionRequest,
 };
 use uv_requirements::upgrade::{read_lock_requirements, LockedRequirements};
-use uv_requirements::{
-    NamedRequirementsError, NamedRequirementsResolver, RequirementsSpecification,
-};
+use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification};
 use uv_resolver::{
     FlatIndex, Lock, OptionsBuilder, PythonRequirement, RequiresPython, ResolutionGraph,
     ResolverMarkers,
```
```diff
@@ -171,6 +169,9 @@ pub(crate) enum ProjectError {
     #[error(transparent)]
     NamedRequirements(#[from] uv_requirements::NamedRequirementsError),
 
+    #[error(transparent)]
+    Extras(#[from] uv_requirements::ExtrasError),
+
     #[error(transparent)]
     PyprojectMut(#[from] uv_workspace::pyproject_mut::Error),
 
```
```diff
@@ -610,7 +611,7 @@ pub(crate) async fn resolve_names(
     native_tls: bool,
     cache: &Cache,
     printer: Printer,
-) -> Result<Vec<Requirement>, NamedRequirementsError> {
+) -> Result<Vec<Requirement>, uv_requirements::NamedRequirementsError> {
     // Partition the requirements into named and unnamed requirements.
     let (mut requirements, unnamed): (Vec<_>, Vec<_>) =
         requirements
```
```diff
@@ -711,13 +712,12 @@ pub(crate) async fn resolve_names(
     // Resolve the unnamed requirements.
     requirements.extend(
         NamedRequirementsResolver::new(
-            unnamed,
             &hasher,
             &state.index,
             DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
         )
         .with_reporter(ResolverReporter::from(printer))
-        .resolve()
+        .resolve(unnamed.into_iter())
         .await?,
     );
 
```
```diff
@@ -2610,6 +2610,119 @@ fn sync_scripts_project_not_packaged() -> Result<()> {
     Ok(())
 }
 
+#[test]
+fn sync_dynamic_extra() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "project"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = ["iniconfig"]
+        dynamic = ["optional-dependencies"]
+
+        [tool.setuptools.dynamic.optional-dependencies]
+        dev = { file = "requirements-dev.txt" }
+
+        [build-system]
+        requires = ["setuptools>=42"]
+        build-backend = "setuptools.build_meta"
+        "#,
+    )?;
+
+    context
+        .temp_dir
+        .child("requirements-dev.txt")
+        .write_str("typing-extensions")?;
+
+    uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("dev"), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 3 packages in [TIME]
+    Prepared 3 packages in [TIME]
+    Installed 3 packages in [TIME]
+     + iniconfig==2.0.0
+     + project==0.1.0 (from file://[TEMP_DIR]/)
+     + typing-extensions==4.10.0
+    "###);
+
+    let lock = context.read("uv.lock");
+
+    insta::with_settings!(
+        {
+            filters => context.filters(),
+        },
+        {
+            assert_snapshot!(
+                lock, @r###"
+                version = 1
+                requires-python = ">=3.12"
+
+                [options]
+                exclude-newer = "2024-03-25T00:00:00Z"
+
+                [[package]]
+                name = "iniconfig"
+                version = "2.0.0"
+                source = { registry = "https://pypi.org/simple" }
+                sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
+                wheels = [
+                    { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
+                ]
+
+                [[package]]
+                name = "project"
+                version = "0.1.0"
+                source = { editable = "." }
+                dependencies = [
+                    { name = "iniconfig" },
+                ]
+
+                [package.optional-dependencies]
+                dev = [
+                    { name = "typing-extensions" },
+                ]
+
+                [package.metadata]
+                requires-dist = [
+                    { name = "iniconfig" },
+                    { name = "typing-extensions", marker = "extra == 'dev'" },
+                ]
+
+                [[package]]
+                name = "typing-extensions"
+                version = "4.10.0"
+                source = { registry = "https://pypi.org/simple" }
+                sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558 }
+                wheels = [
+                    { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926 },
+                ]
+                "###
+            );
+        }
+    );
+
+    // Check that we can re-read the lockfile.
+    uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 3 packages in [TIME]
+    Uninstalled 1 package in [TIME]
+     - typing-extensions==4.10.0
+    "###);
+
+    Ok(())
+}
+
 #[test]
 fn sync_invalid_environment() -> Result<()> {
     let context = TestContext::new_with_versions(&["3.11", "3.12"])
```