Respect `[tool.uv.sources]` in build requirements (#7172)

## Summary

We weren't respecting `tool.uv.sources` when resolving build requirements (`build-system.requires`). This PR lowers those requirements through the project's sources, just as we already do for runtime dependencies, and allows missing lower bounds on them rather than warning.

Closes https://github.com/astral-sh/uv/issues/7147.
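
For illustration, this is the kind of configuration the change makes work end to end. It mirrors the documentation example added in this PR; the project name and the `./packages/setuptools` path are illustrative:

```toml
[project]
name = "example"
version = "0.1.0"

[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"

# The build requirement on `setuptools` is now resolved from this local
# source rather than from the registry.
[tool.uv.sources]
setuptools = { path = "./packages/setuptools" }
```

Per the docs added below, running `uv build --no-sources` verifies that the package still builds with `tool.uv.sources` disabled, as is the case under other build tools like [`pypa/build`](https://github.com/pypa/build).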
Charlie Marsh 2024-10-15 11:31:04 -04:00 committed by GitHub
parent 0943144cf5
commit 855c1917e1
15 changed files with 459 additions and 65 deletions

Cargo.lock (generated)

@ -4286,6 +4286,7 @@ dependencies = [
"toml_edit",
"tracing",
"uv-configuration",
"uv-distribution",
"uv-distribution-types",
"uv-fs",
"uv-pep440",


@ -18,6 +18,7 @@ workspace = true
[dependencies]
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }
uv-fs = { workspace = true }
uv-pep440 = { workspace = true }


@ -57,6 +57,8 @@ static DISTUTILS_NOT_FOUND_RE: LazyLock<Regex> =
pub enum Error {
#[error(transparent)]
Io(#[from] io::Error),
#[error(transparent)]
Lowering(#[from] uv_distribution::MetadataError),
#[error("{} does not appear to be a Python project, as neither `pyproject.toml` nor `setup.py` are present in the directory", _0.simplified_display())]
InvalidSourceDist(PathBuf),
#[error("Invalid `pyproject.toml`")]


@ -28,7 +28,8 @@ use tokio::sync::{Mutex, Semaphore};
use tracing::{debug, info_span, instrument, Instrument};
pub use crate::error::{Error, MissingHeaderCause};
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings};
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
use uv_distribution::{LowerBound, RequiresDist};
use uv_distribution_types::Resolution;
use uv_fs::{rename_with_retry, PythonExt, Simplified};
use uv_pep440::Version;
@ -249,6 +250,7 @@ impl SourceBuild {
build_context: &impl BuildContext,
source_build_context: SourceBuildContext,
version_id: Option<String>,
source_strategy: SourceStrategy,
config_settings: ConfigSettings,
build_isolation: BuildIsolation<'_>,
build_kind: BuildKind,
@ -267,8 +269,14 @@ impl SourceBuild {
let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
// Check if we have a PEP 517 build backend.
let (pep517_backend, project) =
Self::extract_pep517_backend(&source_tree, &default_backend).map_err(|err| *err)?;
let (pep517_backend, project) = Self::extract_pep517_backend(
&source_tree,
fallback_package_name,
source_strategy,
&default_backend,
)
.await
.map_err(|err| *err)?;
let package_name = project
.as_ref()
@ -363,6 +371,7 @@ impl SourceBuild {
package_name.as_ref(),
package_version.as_ref(),
version_id.as_deref(),
source_strategy,
build_kind,
level,
&config_settings,
@ -421,8 +430,10 @@ impl SourceBuild {
}
/// Extract the PEP 517 backend from the `pyproject.toml` or `setup.py` file.
fn extract_pep517_backend(
async fn extract_pep517_backend(
source_tree: &Path,
package_name: Option<&PackageName>,
source_strategy: SourceStrategy,
default_backend: &Pep517Backend,
) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
match fs::read_to_string(source_tree.join("pyproject.toml")) {
@ -433,7 +444,48 @@ impl SourceBuild {
let pyproject_toml: PyProjectToml =
PyProjectToml::deserialize(pyproject_toml.into_deserializer())
.map_err(Error::InvalidPyprojectTomlSchema)?;
let backend = if let Some(build_system) = pyproject_toml.build_system {
// If necessary, lower the requirements.
let requirements = match source_strategy {
SourceStrategy::Enabled => {
if let Some(name) = pyproject_toml
.project
.as_ref()
.map(|project| &project.name)
.or(package_name)
{
// TODO(charlie): Add a type to lower requirements without providing
// empty extras.
let requires_dist = uv_pypi_types::RequiresDist {
name: name.clone(),
requires_dist: build_system.requires,
provides_extras: vec![],
};
let requires_dist = RequiresDist::from_project_maybe_workspace(
requires_dist,
source_tree,
source_strategy,
LowerBound::Allow,
)
.await
.map_err(Error::Lowering)?;
requires_dist.requires_dist
} else {
build_system
.requires
.into_iter()
.map(Requirement::from)
.collect()
}
}
SourceStrategy::Disabled => build_system
.requires
.into_iter()
.map(Requirement::from)
.collect(),
};
Pep517Backend {
// If `build-backend` is missing, inject the legacy setuptools backend, but
// retain the `requires`, to match `pip` and `build`. Note that while PEP 517
@ -446,11 +498,7 @@ impl SourceBuild {
.build_backend
.unwrap_or_else(|| "setuptools.build_meta:__legacy__".to_string()),
backend_path: build_system.backend_path,
requirements: build_system
.requires
.into_iter()
.map(Requirement::from)
.collect(),
requirements,
}
} else {
// If a `pyproject.toml` is present, but `[build-system]` is missing, proceed with
@ -755,6 +803,7 @@ async fn create_pep517_build_environment(
package_name: Option<&PackageName>,
package_version: Option<&Version>,
version_id: Option<&str>,
source_strategy: SourceStrategy,
build_kind: BuildKind,
level: BuildOutput,
config_settings: &ConfigSettings,
@ -851,7 +900,33 @@ async fn create_pep517_build_environment(
version_id,
)
})?;
let extra_requires: Vec<_> = extra_requires.into_iter().map(Requirement::from).collect();
// If necessary, lower the requirements.
let extra_requires = match source_strategy {
SourceStrategy::Enabled => {
if let Some(package_name) = package_name {
// TODO(charlie): Add a type to lower requirements without providing
// empty extras.
let requires_dist = uv_pypi_types::RequiresDist {
name: package_name.clone(),
requires_dist: extra_requires,
provides_extras: vec![],
};
let requires_dist = RequiresDist::from_project_maybe_workspace(
requires_dist,
source_tree,
source_strategy,
LowerBound::Allow,
)
.await
.map_err(Error::Lowering)?;
requires_dist.requires_dist
} else {
extra_requires.into_iter().map(Requirement::from).collect()
}
}
SourceStrategy::Disabled => extra_requires.into_iter().map(Requirement::from).collect(),
};
// Some packages (such as tqdm 4.66.1) list only extra requires that have already been part of
// the pyproject.toml requires (in this case, `wheel`). We can skip doing the whole resolution


@ -229,7 +229,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
} = Planner::new(resolution).build(
site_packages,
&Reinstall::default(),
&BuildOptions::default(),
self.build_options,
self.hasher,
self.index_locations,
self.config_settings,
@ -312,6 +312,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
subdirectory: Option<&'data Path>,
version_id: Option<String>,
dist: Option<&'data SourceDist>,
sources: SourceStrategy,
build_kind: BuildKind,
build_output: BuildOutput,
) -> Result<SourceBuild> {
@ -349,6 +350,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self,
self.source_build_context.clone(),
version_id,
sources,
self.config_settings.clone(),
self.build_isolation,
build_kind,


@ -2,7 +2,9 @@ pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalA
pub use download::LocalWheel;
pub use error::Error;
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
pub use metadata::{ArchiveMetadata, LoweredRequirement, Metadata, RequiresDist};
pub use metadata::{
ArchiveMetadata, LowerBound, LoweredRequirement, Metadata, MetadataError, RequiresDist,
};
pub use reporter::Reporter;
pub use source::prune;


@ -34,6 +34,7 @@ impl LoweredRequirement {
project_dir: &'data Path,
project_sources: &'data BTreeMap<PackageName, Sources>,
workspace: &'data Workspace,
lower_bound: LowerBound,
) -> impl Iterator<Item = Result<LoweredRequirement, LoweringError>> + 'data {
let (source, origin) = if let Some(source) = project_sources.get(&requirement.name) {
(Some(source), Origin::Project)
@ -62,15 +63,17 @@ impl LoweredRequirement {
let Some(source) = source else {
let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty();
// Support recursive editable inclusions.
if has_sources
&& requirement.version_or_url.is_none()
&& &requirement.name != project_name
{
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
if matches!(lower_bound, LowerBound::Warn) {
// Support recursive editable inclusions.
if has_sources
&& requirement.version_or_url.is_none()
&& &requirement.name != project_name
{
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
}
}
return Either::Left(std::iter::once(Ok(Self(Requirement::from(requirement)))));
};
@ -533,3 +536,11 @@ fn path_source(
})
}
}
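/// Whether to warn about missing lower bounds (version constraints) when lowering requirements.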
#[derive(Debug, Copy, Clone)]
pub enum LowerBound {
/// Allow missing lower bounds.
Allow,
/// Warn about missing lower bounds.
Warn,
}


@ -9,8 +9,8 @@ use uv_pep440::{Version, VersionSpecifiers};
use uv_pypi_types::{HashDigest, ResolutionMetadata};
use uv_workspace::WorkspaceError;
pub use crate::metadata::lowering::LoweredRequirement;
use crate::metadata::lowering::LoweringError;
pub use crate::metadata::lowering::{LowerBound, LoweredRequirement};
pub use crate::metadata::requires_dist::RequiresDist;
mod lowering;
@ -77,6 +77,7 @@ impl Metadata {
},
install_path,
sources,
LowerBound::Warn,
)
.await?;


@ -1,6 +1,7 @@
use crate::metadata::{LoweredRequirement, MetadataError};
use crate::Metadata;
use crate::metadata::lowering::LowerBound;
use std::collections::BTreeMap;
use std::path::Path;
use uv_configuration::SourceStrategy;
@ -38,6 +39,7 @@ impl RequiresDist {
metadata: uv_pypi_types::RequiresDist,
install_path: &Path,
sources: SourceStrategy,
lower_bound: LowerBound,
) -> Result<Self, MetadataError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
@ -48,13 +50,14 @@ impl RequiresDist {
return Ok(Self::from_metadata23(metadata));
};
Self::from_project_workspace(metadata, &project_workspace, sources)
Self::from_project_workspace(metadata, &project_workspace, sources, lower_bound)
}
fn from_project_workspace(
metadata: uv_pypi_types::RequiresDist,
project_workspace: &ProjectWorkspace,
source_strategy: SourceStrategy,
lower_bound: LowerBound,
) -> Result<Self, MetadataError> {
// Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
let empty = BTreeMap::default();
@ -92,6 +95,7 @@ impl RequiresDist {
project_workspace.project_root(),
sources,
project_workspace.workspace(),
lower_bound,
)
.map(move |requirement| match requirement {
Ok(requirement) => Ok(requirement.into_inner()),
@ -124,6 +128,7 @@ impl RequiresDist {
project_workspace.project_root(),
sources,
project_workspace.workspace(),
lower_bound,
)
.map(move |requirement| match requirement {
Ok(requirement) => Ok(requirement.into_inner()),
@ -170,6 +175,7 @@ mod test {
use uv_workspace::pyproject::PyProjectToml;
use uv_workspace::{DiscoveryOptions, ProjectWorkspace};
use crate::metadata::lowering::LowerBound;
use crate::RequiresDist;
async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
@ -193,6 +199,7 @@ mod test {
requires_dist,
&project_workspace,
SourceStrategy::Enabled,
LowerBound::Warn,
)?)
}


@ -11,7 +11,7 @@ use crate::metadata::{ArchiveMetadata, Metadata};
use crate::reporter::Facade;
use crate::source::built_wheel_metadata::BuiltWheelMetadata;
use crate::source::revision::Revision;
use crate::{Reporter, RequiresDist};
use crate::{LowerBound, Reporter, RequiresDist};
use fs_err::tokio as fs;
use futures::{FutureExt, TryStreamExt};
use reqwest::Response;
@ -24,7 +24,7 @@ use uv_cache_key::cache_digest;
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
use uv_configuration::{BuildKind, BuildOutput};
use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
use uv_distribution_filename::{SourceDistExtension, WheelFilename};
use uv_distribution_types::{
BuildableSource, DirectorySourceUrl, FileLocation, GitSourceUrl, HashPolicy, Hashed,
@ -389,6 +389,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
requires_dist,
project_root,
self.build_context.sources(),
LowerBound::Warn,
)
.await?;
Ok(requires_dist)
@ -465,7 +466,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Build the source distribution.
let (disk_filename, wheel_filename, metadata) = self
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.build_distribution(
source,
source_dist_entry.path(),
subdirectory,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -573,7 +580,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Otherwise, we either need to build the metadata.
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, source_dist_entry.path(), subdirectory)
.build_metadata(
source,
source_dist_entry.path(),
subdirectory,
self.build_context.sources(),
)
.boxed_local()
.await?
{
@ -598,7 +610,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Build the source distribution.
let (_disk_filename, _wheel_filename, metadata) = self
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.build_distribution(
source,
source_dist_entry.path(),
subdirectory,
&cache_shard,
self.build_context.sources(),
)
.await?;
// Store the metadata.
@ -750,7 +768,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_distribution(source, source_entry.path(), None, &cache_shard)
.build_distribution(
source,
source_entry.path(),
None,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -836,7 +860,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, source_entry.path(), None)
.build_metadata(
source,
source_entry.path(),
None,
self.build_context.sources(),
)
.boxed_local()
.await?
{
@ -869,7 +898,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_distribution(source, source_entry.path(), None, &cache_shard)
.build_distribution(
source,
source_entry.path(),
None,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -998,7 +1033,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_distribution(source, &resource.install_path, None, &cache_shard)
.build_distribution(
source,
&resource.install_path,
None,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -1087,7 +1128,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, &resource.install_path, None)
.build_metadata(
source,
&resource.install_path,
None,
self.build_context.sources(),
)
.boxed_local()
.await?
{
@ -1124,7 +1170,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_distribution(source, &resource.install_path, None, &cache_shard)
.build_distribution(
source,
&resource.install_path,
None,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -1246,7 +1298,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_distribution(source, fetch.path(), resource.subdirectory, &cache_shard)
.build_distribution(
source,
fetch.path(),
resource.subdirectory,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -1344,7 +1402,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, fetch.path(), resource.subdirectory)
.build_metadata(
source,
fetch.path(),
resource.subdirectory,
self.build_context.sources(),
)
.boxed_local()
.await?
{
@ -1376,7 +1439,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_distribution(source, fetch.path(), resource.subdirectory, &cache_shard)
.build_distribution(
source,
fetch.path(),
resource.subdirectory,
&cache_shard,
self.build_context.sources(),
)
.await?;
if let Some(task) = task {
@ -1584,6 +1653,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source_root: &Path,
subdirectory: Option<&Path>,
cache_shard: &CacheShard,
source_strategy: SourceStrategy,
) -> Result<(String, WheelFilename, ResolutionMetadata), Error> {
debug!("Building: {source}");
@ -1611,6 +1681,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
subdirectory,
Some(source.to_string()),
source.as_dist(),
source_strategy,
if source.is_editable() {
BuildKind::Editable
} else {
@ -1642,6 +1713,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>,
source_root: &Path,
subdirectory: Option<&Path>,
source_strategy: SourceStrategy,
) -> Result<Option<ResolutionMetadata>, Error> {
debug!("Preparing metadata for: {source}");
@ -1653,6 +1725,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
subdirectory,
Some(source.to_string()),
source.as_dist(),
source_strategy,
if source.is_editable() {
BuildKind::Editable
} else {


@ -108,6 +108,7 @@ pub trait BuildContext {
subdirectory: Option<&'a Path>,
version_id: Option<String>,
dist: Option<&'a SourceDist>,
sources: SourceStrategy,
build_kind: BuildKind,
build_output: BuildOutput,
) -> impl Future<Output = Result<Self::SourceDistBuilder>> + 'a;


@ -150,7 +150,7 @@ async fn build_impl(
let src = std::path::absolute(src)?;
let metadata = match fs_err::tokio::metadata(&src).await {
Ok(metadata) => metadata,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
Err(err) if err.kind() == io::ErrorKind::NotFound => {
return Err(anyhow::anyhow!(
"Source `{}` does not exist",
src.user_display()
@ -559,6 +559,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Sdist,
build_output,
)
@ -596,6 +597,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Wheel,
build_output,
)
@ -617,6 +619,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Sdist,
build_output,
)
@ -638,6 +641,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Wheel,
build_output,
)
@ -658,6 +662,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Sdist,
build_output,
)
@ -675,6 +680,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Wheel,
build_output,
)
@ -714,6 +720,7 @@ async fn build_package(
subdirectory,
version_id.map(ToString::to_string),
dist,
sources,
BuildKind::Wheel,
build_output,
)


@ -1,6 +1,7 @@
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::{fixture::ChildPath, prelude::*};
use indoc::indoc;
use insta::assert_snapshot;
use predicates::prelude::predicate;
@ -2722,6 +2723,169 @@ fn sync_dynamic_extra() -> Result<()> {
Ok(())
}
#[test]
fn build_system_requires_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let build = context.temp_dir.child("backend");
build.child("pyproject.toml").write_str(
r#"
[project]
name = "backend"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["typing-extensions>=3.10"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
build
.child("src")
.child("backend")
.child("__init__.py")
.write_str(indoc! { r#"
def hello() -> str:
return "Hello, world!"
"#})?;
build.child("README.md").touch()?;
let pyproject_toml = context.temp_dir.child("project").child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>1"]
[build-system]
requires = ["setuptools>=42", "backend==0.1.0"]
build-backend = "setuptools.build_meta"
[tool.uv.workspace]
members = ["../backend"]
[tool.uv.sources]
backend = { workspace = true }
"#,
)?;
context
.temp_dir
.child("project")
.child("setup.py")
.write_str(indoc! {r"
from setuptools import setup
from backend import hello
hello()
setup()
",
})?;
uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 4 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/project)
"###);
Ok(())
}
#[test]
fn build_system_requires_path() -> Result<()> {
let context = TestContext::new("3.12");
let build = context.temp_dir.child("backend");
build.child("pyproject.toml").write_str(
r#"
[project]
name = "backend"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["typing-extensions>=3.10"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
build
.child("src")
.child("backend")
.child("__init__.py")
.write_str(indoc! { r#"
def hello() -> str:
return "Hello, world!"
"#})?;
build.child("README.md").touch()?;
let pyproject_toml = context.temp_dir.child("project").child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>1"]
[build-system]
requires = ["setuptools>=42", "backend==0.1.0"]
build-backend = "setuptools.build_meta"
[tool.uv.sources]
backend = { path = "../backend" }
"#,
)?;
context
.temp_dir
.child("project")
.child("setup.py")
.write_str(indoc! {r"
from setuptools import setup
from backend import hello
hello()
setup()
",
})?;
uv_snapshot!(context.filters(), context.sync().current_dir(context.temp_dir.child("project")), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/project)
"###);
Ok(())
}
#[test]
fn sync_invalid_environment() -> Result<()> {
let context = TestContext::new_with_versions(&["3.11", "3.12"])


@ -323,6 +323,73 @@ To add a development dependency, include the `--dev` flag:
$ uv add ruff --dev
```
## Build dependencies
If a project is structured as a [Python package](./projects.md#build-systems), it may declare
dependencies that are required to build the project, but not required to run it. These dependencies
are specified in the `[build-system]` table under `build-system.requires`, following
[PEP 518](https://peps.python.org/pep-0518/).
For example, if a project uses `setuptools` as its build backend, it should declare `setuptools` as
a build dependency:
```toml title="pyproject.toml"
[project]
name = "pandas"
version = "0.1.0"
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
```
By default, uv will respect `tool.uv.sources` when resolving build dependencies. For example, to use
a local version of `setuptools` for building, add the source to `tool.uv.sources`:
```toml title="pyproject.toml"
[project]
name = "pandas"
version = "0.1.0"
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
[tool.uv.sources]
setuptools = { path = "./packages/setuptools" }
```
When publishing a package, we recommend running `uv build --no-sources` to ensure that the package
builds correctly when `tool.uv.sources` is disabled, as is the case when using other build tools,
like [`pypa/build`](https://github.com/pypa/build).
## Editable dependencies
A regular installation of a directory with a Python package first builds a wheel and then installs
that wheel into your virtual environment, copying all source files. When the package source files
are edited, the virtual environment will contain outdated versions.
Editable installations solve this problem by adding a link to the project within the virtual
environment (a `.pth` file), which instructs the interpreter to include the source files directly.
There are some limitations to editables (mainly: the build backend needs to support them, and native
modules aren't recompiled before import), but they are useful for development, as the virtual
environment will always use the latest changes to the package.
uv uses editable installation for workspace packages by default.
To add an editable dependency, use the `--editable` flag:
```console
$ uv add --editable ./path/foo
```
Or, to opt out of using an editable dependency in a workspace:
```console
$ uv add --no-editable ./path/foo
```
## PEP 508
[PEP 508](https://peps.python.org/pep-0508/) defines a syntax for dependency specification. It is
@ -355,30 +422,3 @@ Markers are combined with `and`, `or`, and parentheses, e.g.,
`aiohttp >=3.7.4,<4; (sys_platform != 'win32' or implementation_name != 'pypy') and python_version >= '3.10'`.
Note that versions within markers must be quoted, while versions _outside_ of markers must _not_ be
quoted.
## Editable dependencies
A regular installation of a directory with a Python package first builds a wheel and then installs
that wheel into your virtual environment, copying all source files. When the package source files
are edited, the virtual environment will contain outdated versions.
Editable installations solve this problem by adding a link to the project within the virtual
environment (a `.pth` file), which instructs the interpreter to include the source files directly.
There are some limitations to editables (mainly: the build backend needs to support them, and native
modules aren't recompiled before import), but they are useful for development, as the virtual
environment will always use the latest changes to the package.
uv uses editable installation for workspace packages by default.
To add an editable dependency, use the `--editable` flag:
```console
$ uv add --editable ./path/foo
```
Or, to opt-out of using an editable dependency in a workspace:
```console
$ uv add --no-editable ./path/foo
```


@ -27,6 +27,13 @@ artifacts in a `dist/` subdirectory.
Alternatively, `uv build <SRC>` will build the package in the specified directory, while
`uv build --package <PACKAGE>` will build the specified package within the current workspace.
!!! info
By default, `uv build` respects `tool.uv.sources` when resolving build dependencies from the
`build-system.requires` section of the `pyproject.toml`. When publishing a package, we recommend
running `uv build --no-sources` to ensure that the package builds correctly when `tool.uv.sources`
is disabled, as is the case when using other build tools, like [`pypa/build`](https://github.com/pypa/build).
## Publishing your package
Publish your package with `uv publish`: