Support requirements without an extension (#16923)

Duplicate of #16889 since I merged it without realizing it was stacked.

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
This commit is contained in:
Zanie Blue 2025-12-02 05:02:24 -06:00 committed by GitHub
parent 54f9932362
commit 5947fb0c83
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 379 additions and 328 deletions

View File

@ -47,7 +47,7 @@ use url::Url;
#[cfg(feature = "http")]
use uv_client::BaseClient;
use uv_client::BaseClientBuilder;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::{NoBinary, NoBuild, PackageNameSpecifier};
use uv_distribution_types::{
Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification,
@ -165,7 +165,7 @@ pub struct RequirementsTxt {
}
impl RequirementsTxt {
/// See module level documentation
/// See module level documentation.
#[instrument(
skip_all,
fields(requirements_txt = requirements_txt.as_ref().as_os_str().to_str())
@ -173,12 +173,11 @@ impl RequirementsTxt {
pub async fn parse(
requirements_txt: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
client_builder: &BaseClientBuilder<'_>,
) -> Result<Self, RequirementsTxtFileError> {
Self::parse_with_cache(
requirements_txt,
working_dir,
client_builder,
&BaseClientBuilder::default().connectivity(Connectivity::Offline),
&mut SourceCache::default(),
)
.await
@ -209,6 +208,40 @@ impl RequirementsTxt {
.await
}
/// Parse requirements from a string, using the given path for error messages and resolving
/// relative paths.
pub async fn parse_str(
content: &str,
requirements_txt: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
client_builder: &BaseClientBuilder<'_>,
source_contents: &mut SourceCache,
) -> Result<Self, RequirementsTxtFileError> {
let requirements_txt = requirements_txt.as_ref();
let working_dir = working_dir.as_ref();
let requirements_dir = requirements_txt.parent().unwrap_or(working_dir);
let mut visited = VisitedFiles::Requirements {
requirements: &mut FxHashSet::default(),
constraints: &mut FxHashSet::default(),
};
Self::parse_inner(
content,
working_dir,
requirements_dir,
client_builder,
requirements_txt,
&mut visited,
source_contents,
)
.await
.map_err(|err| RequirementsTxtFileError {
file: requirements_txt.to_path_buf(),
error: err,
})
}
/// See module level documentation
#[instrument(
skip_all,
@ -1509,7 +1542,6 @@ mod test {
use test_case::test_case;
use unscanny::Scanner;
use uv_client::BaseClientBuilder;
use uv_fs::Simplified;
use crate::{RequirementsTxt, calculate_row_column};
@ -1547,13 +1579,9 @@ mod test {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual = RequirementsTxt::parse(
requirements_txt.clone(),
&working_dir,
&BaseClientBuilder::default(),
)
.await
.unwrap();
let actual = RequirementsTxt::parse(requirements_txt.clone(), &working_dir)
.await
.unwrap();
let snapshot = format!("parse-{}", path.to_string_lossy());
@ -1599,13 +1627,9 @@ mod test {
let requirements_txt = temp_dir.path().join(path);
fs::write(&requirements_txt, contents).unwrap();
let actual = RequirementsTxt::parse(
&requirements_txt,
&working_dir,
&BaseClientBuilder::default(),
)
.await
.unwrap();
let actual = RequirementsTxt::parse(&requirements_txt, &working_dir)
.await
.unwrap();
let snapshot = format!("line-endings-{}", path.to_string_lossy());
@ -1624,13 +1648,9 @@ mod test {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual = RequirementsTxt::parse(
requirements_txt,
&working_dir,
&BaseClientBuilder::default(),
)
.await
.unwrap();
let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
.await
.unwrap();
let snapshot = format!("parse-unix-{}", path.to_string_lossy());
@ -1649,13 +1669,9 @@ mod test {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual = RequirementsTxt::parse(
requirements_txt,
&working_dir,
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
.await
.unwrap_err();
let snapshot = format!("parse-unix-{}", path.to_string_lossy());
@ -1674,13 +1690,9 @@ mod test {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual = RequirementsTxt::parse(
requirements_txt,
&working_dir,
&BaseClientBuilder::default(),
)
.await
.unwrap();
let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
.await
.unwrap();
let snapshot = format!("parse-windows-{}", path.to_string_lossy());
@ -1700,13 +1712,9 @@ mod test {
-r missing.txt
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error)
.chain()
// The last error is operating-system specific.
@ -1745,13 +1753,9 @@ mod test {
numpy[ö]==1.29
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1778,13 +1782,9 @@ mod test {
numpy @ https:///
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1811,13 +1811,9 @@ mod test {
-e https://localhost:8080/
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1844,13 +1840,9 @@ mod test {
-e https://files.pythonhosted.org/packages/f7/69/96766da2cdb5605e6a31ef2734aff0be17901cefb385b885c2ab88896d76/ruff-0.5.6.tar.gz
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1875,13 +1867,9 @@ mod test {
-e black[,abcdef]
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1908,13 +1896,9 @@ mod test {
--index-url 123
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1939,13 +1923,9 @@ mod test {
--index-url https:////
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -1971,13 +1951,9 @@ mod test {
--no-binary
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -2004,13 +1980,9 @@ mod test {
file.txt
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -2047,13 +2019,9 @@ mod test {
-r subdir/child.txt
"})?;
let requirements = RequirementsTxt::parse(
parent_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap();
let requirements = RequirementsTxt::parse(parent_txt.path(), temp_dir.path())
.await
.unwrap();
insta::with_settings!({
filters => path_filters(&path_filter(temp_dir.path())),
@ -2111,13 +2079,9 @@ mod test {
--no-binary flask
"})?;
let requirements = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap();
let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap();
insta::with_settings!({
filters => path_filters(&path_filter(temp_dir.path())),
@ -2186,13 +2150,9 @@ mod test {
--no-index
"})?;
let requirements = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap();
let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap();
insta::with_settings!({
filters => path_filters(&path_filter(temp_dir.path())),
@ -2288,13 +2248,9 @@ mod test {
--index-url https://fake.pypi.org/simple
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -2336,13 +2292,9 @@ mod test {
httpx # comment
"})?;
let requirements = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap();
let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap();
insta::with_settings!({
filters => path_filters(&path_filter(temp_dir.path())),
@ -2551,13 +2503,9 @@ mod test {
importlib_metadata-8.2.0+local-py3-none-any.whl[extra]
"})?;
let requirements = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap();
let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap();
insta::with_settings!({
filters => path_filters(&path_filter(temp_dir.path())),
@ -2893,13 +2841,9 @@ mod test {
tqdm
"})?;
let error = RequirementsTxt::parse(
requirements_txt.path(),
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await
.unwrap_err();
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt = regex::escape(&requirements_txt.path().user_display().to_string());
@ -2995,12 +2939,7 @@ mod test {
-c constraints-only-recursive.txt
"})?;
let parsed = RequirementsTxt::parse(
&requirements,
temp_dir.path(),
&BaseClientBuilder::default(),
)
.await?;
let parsed = RequirementsTxt::parse(&requirements, temp_dir.path()).await?;
let requirements: BTreeSet<String> = parsed
.requirements

View File

@ -27,6 +27,9 @@ pub enum RequirementsSource {
SetupCfg(PathBuf),
/// Dependencies were provided via an unsupported Conda `environment.yml` file (e.g., `pip install -r environment.yml`).
EnvironmentYml(PathBuf),
/// An extensionless file that could be either a PEP 723 script or a requirements.txt file.
/// We detect the format when reading the file.
Extensionless(PathBuf),
}
impl RequirementsSource {
@ -50,7 +53,6 @@ impl RequirementsSource {
.extension()
.is_some_and(|ext| ext.eq_ignore_ascii_case("py") || ext.eq_ignore_ascii_case("pyw"))
{
// TODO(blueraft): Support scripts without an extension.
Ok(Self::Pep723Script(path))
} else if path
.extension()
@ -60,6 +62,17 @@ impl RequirementsSource {
"`{}` is not a valid PEP 751 filename: expected TOML file to start with `pylock.` and end with `.toml` (e.g., `pylock.toml`, `pylock.dev.toml`)",
path.user_display(),
))
} else if path
.extension()
.is_some_and(|ext| ext.eq_ignore_ascii_case("txt") || ext.eq_ignore_ascii_case("in"))
|| path.starts_with("http://")
|| path.starts_with("https://")
{
Ok(Self::RequirementsTxt(path))
} else if path.extension().is_none() {
// If we don't have an extension, mark it as extensionless so we can detect
// the format later (either a PEP 723 script or a requirements.txt file).
Ok(Self::Extensionless(path))
} else {
Ok(Self::RequirementsTxt(path))
}
@ -302,7 +315,8 @@ impl std::fmt::Display for RequirementsSource {
| Self::PyprojectToml(path)
| Self::SetupPy(path)
| Self::SetupCfg(path)
| Self::EnvironmentYml(path) => {
| Self::EnvironmentYml(path)
| Self::Extensionless(path) => {
write!(f, "{}", path.simplified_display())
}
}

View File

@ -46,7 +46,7 @@ use uv_fs::{CWD, Simplified};
use uv_normalize::{ExtraName, PackageName, PipGroupName};
use uv_pypi_types::PyProjectToml;
use uv_requirements_txt::{RequirementsTxt, RequirementsTxtRequirement, SourceCache};
use uv_scripts::{Pep723Error, Pep723Item, Pep723Script};
use uv_scripts::{Pep723Error, Pep723Metadata, Pep723Script};
use uv_warnings::warn_user;
use crate::{RequirementsSource, SourceTree};
@ -95,6 +95,142 @@ impl RequirementsSpecification {
Self::from_source_with_cache(source, client_builder, &mut SourceCache::default()).await
}
/// Create a [`RequirementsSpecification`] from PEP 723 script metadata.
fn from_pep723_metadata(metadata: &Pep723Metadata) -> Self {
let requirements = metadata
.dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
UnresolvedRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<UnresolvedRequirementSpecification>>()
})
.unwrap_or_default();
if let Some(tool_uv) = metadata.tool.as_ref().and_then(|tool| tool.uv.as_ref()) {
let constraints = tool_uv
.constraint_dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
NameRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<NameRequirementSpecification>>()
})
.unwrap_or_default();
let overrides = tool_uv
.override_dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
UnresolvedRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<UnresolvedRequirementSpecification>>()
})
.unwrap_or_default();
Self {
requirements,
constraints,
overrides,
index_url: tool_uv
.top_level
.index_url
.as_ref()
.map(|index| Index::from(index.clone()).url),
extra_index_urls: tool_uv
.top_level
.extra_index_url
.as_ref()
.into_iter()
.flat_map(|urls| urls.iter().map(|index| Index::from(index.clone()).url))
.collect(),
no_index: tool_uv.top_level.no_index.unwrap_or_default(),
find_links: tool_uv
.top_level
.find_links
.as_ref()
.into_iter()
.flat_map(|urls| urls.iter().map(|index| Index::from(index.clone()).url))
.collect(),
no_binary: NoBinary::from_args(
tool_uv.top_level.no_binary,
tool_uv
.top_level
.no_binary_package
.clone()
.unwrap_or_default(),
),
no_build: NoBuild::from_args(
tool_uv.top_level.no_build,
tool_uv
.top_level
.no_build_package
.clone()
.unwrap_or_default(),
),
..Self::default()
}
} else {
Self {
requirements,
..Self::default()
}
}
}
/// Create a [`RequirementsSpecification`] from a parsed `requirements.txt` file.
fn from_requirements_txt(requirements_txt: RequirementsTxt) -> Self {
Self {
requirements: requirements_txt
.requirements
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.chain(
requirements_txt
.editables
.into_iter()
.map(UnresolvedRequirementSpecification::from),
)
.collect(),
constraints: requirements_txt
.constraints
.into_iter()
.map(Requirement::from)
.map(NameRequirementSpecification::from)
.collect(),
index_url: requirements_txt.index_url.map(IndexUrl::from),
extra_index_urls: requirements_txt
.extra_index_urls
.into_iter()
.map(IndexUrl::from)
.collect(),
no_index: requirements_txt.no_index,
find_links: requirements_txt
.find_links
.into_iter()
.map(IndexUrl::from)
.collect(),
no_binary: requirements_txt.no_binary,
no_build: requirements_txt.only_binary,
..Self::default()
}
}
/// Read the requirements and constraints from a source, using a cache for file contents.
#[instrument(skip_all, level = tracing::Level::DEBUG, fields(source = % source))]
pub async fn from_source_with_cache(
@ -116,11 +252,7 @@ impl RequirementsSpecification {
..Self::default()
},
RequirementsSource::RequirementsTxt(path) => {
if !(path == Path::new("-")
|| path.starts_with("http://")
|| path.starts_with("https://")
|| path.exists())
{
if !(path.starts_with("http://") || path.starts_with("https://") || path.exists()) {
return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
}
@ -128,50 +260,13 @@ impl RequirementsSpecification {
RequirementsTxt::parse_with_cache(path, &*CWD, client_builder, cache).await?;
if requirements_txt == RequirementsTxt::default() {
if path == Path::new("-") {
warn_user!("No dependencies found in stdin");
} else {
warn_user!(
"Requirements file `{}` does not contain any dependencies",
path.user_display()
);
}
warn_user!(
"Requirements file `{}` does not contain any dependencies",
path.user_display()
);
}
Self {
requirements: requirements_txt
.requirements
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.chain(
requirements_txt
.editables
.into_iter()
.map(UnresolvedRequirementSpecification::from),
)
.collect(),
constraints: requirements_txt
.constraints
.into_iter()
.map(Requirement::from)
.map(NameRequirementSpecification::from)
.collect(),
index_url: requirements_txt.index_url.map(IndexUrl::from),
extra_index_urls: requirements_txt
.extra_index_urls
.into_iter()
.map(IndexUrl::from)
.collect(),
no_index: requirements_txt.no_index,
find_links: requirements_txt
.find_links
.into_iter()
.map(IndexUrl::from)
.collect(),
no_binary: requirements_txt.no_binary,
no_build: requirements_txt.only_binary,
..Self::default()
}
Self::from_requirements_txt(requirements_txt)
}
RequirementsSource::PyprojectToml(path) => {
let contents = match fs_err::tokio::read_to_string(&path).await {
@ -197,7 +292,7 @@ impl RequirementsSpecification {
}
RequirementsSource::Pep723Script(path) => {
let script = match Pep723Script::read(&path).await {
Ok(Some(script)) => Pep723Item::Script(script),
Ok(Some(script)) => script,
Ok(None) => {
return Err(anyhow::anyhow!(
"`{}` does not contain inline script metadata",
@ -213,106 +308,7 @@ impl RequirementsSpecification {
Err(err) => return Err(err.into()),
};
let metadata = script.metadata();
let requirements = metadata
.dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
UnresolvedRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<UnresolvedRequirementSpecification>>()
})
.unwrap_or_default();
if let Some(tool_uv) = metadata.tool.as_ref().and_then(|tool| tool.uv.as_ref()) {
let constraints = tool_uv
.constraint_dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
NameRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<NameRequirementSpecification>>()
})
.unwrap_or_default();
let overrides = tool_uv
.override_dependencies
.as_ref()
.map(|dependencies| {
dependencies
.iter()
.map(|dependency| {
UnresolvedRequirementSpecification::from(Requirement::from(
dependency.to_owned(),
))
})
.collect::<Vec<UnresolvedRequirementSpecification>>()
})
.unwrap_or_default();
Self {
requirements,
constraints,
overrides,
index_url: tool_uv
.top_level
.index_url
.as_ref()
.map(|index| Index::from(index.clone()).url),
extra_index_urls: tool_uv
.top_level
.extra_index_url
.as_ref()
.into_iter()
.flat_map(|urls| {
urls.iter().map(|index| Index::from(index.clone()).url)
})
.collect(),
no_index: tool_uv.top_level.no_index.unwrap_or_default(),
find_links: tool_uv
.top_level
.find_links
.as_ref()
.into_iter()
.flat_map(|urls| {
urls.iter().map(|index| Index::from(index.clone()).url)
})
.collect(),
no_binary: NoBinary::from_args(
tool_uv.top_level.no_binary,
tool_uv
.top_level
.no_binary_package
.clone()
.unwrap_or_default(),
),
no_build: NoBuild::from_args(
tool_uv.top_level.no_build,
tool_uv
.top_level
.no_build_package
.clone()
.unwrap_or_default(),
),
..Self::default()
}
} else {
Self {
requirements,
..Self::default()
}
}
Self::from_pep723_metadata(&script.metadata)
}
RequirementsSource::SetupPy(path) => {
if !path.is_file() {
@ -350,6 +346,39 @@ impl RequirementsSpecification {
path.user_display()
));
}
RequirementsSource::Extensionless(path) => {
// Read the file content.
let content = if let Some(content) = cache.get(path.as_path()) {
content.clone()
} else {
let content = uv_fs::read_to_string_transcode(&path).await?;
cache.insert(path.clone(), content.clone());
content
};
// Detect if it's a PEP 723 script.
if let Some(metadata) = Pep723Metadata::parse(content.as_bytes())? {
Self::from_pep723_metadata(&metadata)
} else {
// If it's not a PEP 723 script, assume it's a `requirements.txt` file.
let requirements_txt =
RequirementsTxt::parse_str(&content, &path, &*CWD, client_builder, cache)
.await?;
if requirements_txt == RequirementsTxt::default() {
if path == Path::new("-") {
warn_user!("No dependencies found in stdin");
} else {
warn_user!(
"Requirements file `{}` does not contain any dependencies",
path.user_display()
);
}
}
Self::from_requirements_txt(requirements_txt)
}
}
})
}

View File

@ -2,7 +2,6 @@ use std::path::Path;
use anyhow::Result;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::Upgrade;
use uv_fs::CWD;
use uv_git::ResolvedRepositoryReference;
@ -38,13 +37,7 @@ pub async fn read_requirements_txt(
}
// Parse the requirements from the lockfile.
let requirements_txt = RequirementsTxt::parse(
output_file,
&*CWD,
// Pseudo-client for reading local-only requirements.
&BaseClientBuilder::default().connectivity(Connectivity::Offline),
)
.await?;
let requirements_txt = RequirementsTxt::parse(output_file, &*CWD).await?;
// Map each entry in the lockfile to a preference.
let preferences = requirements_txt

View File

@ -138,6 +138,7 @@ pub(crate) async fn add(
RequirementsSource::Package(_)
| RequirementsSource::Editable(_)
| RequirementsSource::RequirementsTxt(_)
| RequirementsSource::Extensionless(_)
| RequirementsSource::EnvironmentYml(_) => {}
}
}

View File

@ -140,7 +140,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
RequirementsSource::SetupCfg(_) => {
bail!("Adding requirements from a `setup.cfg` is not supported in `uv run`");
}
RequirementsSource::RequirementsTxt(path) => {
RequirementsSource::Extensionless(path) => {
if path == Path::new("-") {
requirements_from_stdin = true;
}

View File

@ -7096,6 +7096,62 @@ fn add_script_without_metadata_table_with_docstring() -> Result<()> {
Ok(())
}
/// Add to a script without a `.py` extension.
#[test]
fn add_extensionless_script() -> Result<()> {
let context = TestContext::new("3.12");
let script = context.temp_dir.child("script");
script.write_str(indoc! {r#"
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
import requests
from rich.pretty import pprint
resp = requests.get("https://peps.python.org/api/peps.json")
data = resp.json()
pprint([(k, v["title"]) for k, v in data.items()][:10])
"#})?;
uv_snapshot!(context.filters(), context.add().args(["rich", "requests<3"]).arg("--script").arg("script"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Updated `script`
"###);
let script_content = context.read("script");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
script_content, @r###"
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.12"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///
import requests
from rich.pretty import pprint
resp = requests.get("https://peps.python.org/api/peps.json")
data = resp.json()
pprint([(k, v["title"]) for k, v in data.items()][:10])
"###
);
});
Ok(())
}
/// Remove a dependency that is present in multiple places.
#[test]
fn remove_repeated() -> Result<()> {

View File

@ -2646,8 +2646,7 @@ fn tool_run_with_incompatible_build_constraints() -> Result<()> {
fn tool_run_with_dependencies_from_script() -> Result<()> {
let context = TestContext::new("3.12").with_filtered_counts();
let script = context.temp_dir.child("script.py");
script.write_str(indoc! {r#"
let script_contents = indoc! {r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
@ -2656,7 +2655,13 @@ fn tool_run_with_dependencies_from_script() -> Result<()> {
# ///
import anyio
"#})?;
"#};
let script = context.temp_dir.child("script.py");
script.write_str(script_contents)?;
let script_without_extension = context.temp_dir.child("script-no-ext");
script_without_extension.write_str(script_contents)?;
// script dependencies (anyio) are now installed.
uv_snapshot!(context.filters(), context.tool_run()
@ -2684,6 +2689,20 @@ fn tool_run_with_dependencies_from_script() -> Result<()> {
+ sniffio==1.3.1
");
uv_snapshot!(context.filters(), context.tool_run()
.arg("--with-requirements")
.arg("script-no-ext")
.arg("black")
.arg("script-no-ext")
.arg("-q"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved [N] packages in [TIME]
");
// Error when the script is not a valid PEP 723 script.
let script = context.temp_dir.child("not_pep723_script.py");
script.write_str("import anyio")?;