mirror of https://github.com/astral-sh/uv
Support requirements without an extension (#16923)
Duplicate of #16889 since I merged it without realizing it was stacked.

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
parent 54f9932362
commit 5947fb0c83
@@ -47,7 +47,7 @@ use url::Url;
 #[cfg(feature = "http")]
 use uv_client::BaseClient;
-use uv_client::BaseClientBuilder;
+use uv_client::{BaseClientBuilder, Connectivity};
 use uv_configuration::{NoBinary, NoBuild, PackageNameSpecifier};
 use uv_distribution_types::{
     Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification,

@@ -165,7 +165,7 @@ pub struct RequirementsTxt {
 }

 impl RequirementsTxt {
-    /// See module level documentation
+    /// See module level documentation.
     #[instrument(
         skip_all,
         fields(requirements_txt = requirements_txt.as_ref().as_os_str().to_str())

@@ -173,12 +173,11 @@ impl RequirementsTxt {
     pub async fn parse(
         requirements_txt: impl AsRef<Path>,
         working_dir: impl AsRef<Path>,
-        client_builder: &BaseClientBuilder<'_>,
     ) -> Result<Self, RequirementsTxtFileError> {
         Self::parse_with_cache(
             requirements_txt,
             working_dir,
-            client_builder,
+            &BaseClientBuilder::default().connectivity(Connectivity::Offline),
             &mut SourceCache::default(),
         )
         .await
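Note (not part of the diff): with this hunk, `RequirementsTxt::parse` drops its `BaseClientBuilder` parameter and always builds an offline client internally before delegating to `parse_with_cache`, so this entry point no longer fetches remote includes over the network. A minimal sketch of the narrowed call shape, with illustrative names:

    // Sketch only: calling the two-argument `parse` after this change.
    use std::path::Path;
    use uv_requirements_txt::RequirementsTxt;

    async fn load_local(path: &Path, working_dir: &Path) -> anyhow::Result<RequirementsTxt> {
        // Two arguments only; the old third `&BaseClientBuilder` argument is gone.
        let parsed = RequirementsTxt::parse(path, working_dir).await?;
        Ok(parsed)
    }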
@@ -209,6 +208,40 @@ impl RequirementsTxt {
         .await
     }

+    /// Parse requirements from a string, using the given path for error messages and resolving
+    /// relative paths.
+    pub async fn parse_str(
+        content: &str,
+        requirements_txt: impl AsRef<Path>,
+        working_dir: impl AsRef<Path>,
+        client_builder: &BaseClientBuilder<'_>,
+        source_contents: &mut SourceCache,
+    ) -> Result<Self, RequirementsTxtFileError> {
+        let requirements_txt = requirements_txt.as_ref();
+        let working_dir = working_dir.as_ref();
+        let requirements_dir = requirements_txt.parent().unwrap_or(working_dir);
+
+        let mut visited = VisitedFiles::Requirements {
+            requirements: &mut FxHashSet::default(),
+            constraints: &mut FxHashSet::default(),
+        };
+
+        Self::parse_inner(
+            content,
+            working_dir,
+            requirements_dir,
+            client_builder,
+            requirements_txt,
+            &mut visited,
+            source_contents,
+        )
+        .await
+        .map_err(|err| RequirementsTxtFileError {
+            file: requirements_txt.to_path_buf(),
+            error: err,
+        })
+    }
+
     /// See module level documentation
     #[instrument(
         skip_all,
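Note (not part of the diff): the new `parse_str` entry point parses requirements that are already in memory, using the supplied path only for error reporting and for resolving relative `-r`/`-c` includes. A hedged sketch of a caller, with illustrative names (the same call appears later in this commit for extensionless files):

    // Sketch only: drive `parse_str` with content that was read elsewhere.
    use std::path::Path;
    use uv_client::BaseClientBuilder;
    use uv_requirements_txt::{RequirementsTxt, SourceCache};

    async fn parse_in_memory(
        content: &str,
        path: &Path,
        working_dir: &Path,
    ) -> anyhow::Result<RequirementsTxt> {
        let client_builder = BaseClientBuilder::default();
        let mut cache = SourceCache::default();
        let parsed =
            RequirementsTxt::parse_str(content, path, working_dir, &client_builder, &mut cache)
                .await?;
        Ok(parsed)
    }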
@@ -1509,7 +1542,6 @@ mod test {
     use test_case::test_case;
     use unscanny::Scanner;

-    use uv_client::BaseClientBuilder;
     use uv_fs::Simplified;

     use crate::{RequirementsTxt, calculate_row_column};

@@ -1547,11 +1579,7 @@ mod test {
         let working_dir = workspace_test_data_dir().join("requirements-txt");
         let requirements_txt = working_dir.join(path);

-        let actual = RequirementsTxt::parse(
-            requirements_txt.clone(),
-            &working_dir,
-            &BaseClientBuilder::default(),
-        )
+        let actual = RequirementsTxt::parse(requirements_txt.clone(), &working_dir)
         .await
         .unwrap();

@@ -1599,11 +1627,7 @@ mod test {
         let requirements_txt = temp_dir.path().join(path);
         fs::write(&requirements_txt, contents).unwrap();

-        let actual = RequirementsTxt::parse(
-            &requirements_txt,
-            &working_dir,
-            &BaseClientBuilder::default(),
-        )
+        let actual = RequirementsTxt::parse(&requirements_txt, &working_dir)
         .await
         .unwrap();

@@ -1624,11 +1648,7 @@ mod test {
         let working_dir = workspace_test_data_dir().join("requirements-txt");
         let requirements_txt = working_dir.join(path);

-        let actual = RequirementsTxt::parse(
-            requirements_txt,
-            &working_dir,
-            &BaseClientBuilder::default(),
-        )
+        let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
         .await
         .unwrap();

@@ -1649,11 +1669,7 @@ mod test {
         let working_dir = workspace_test_data_dir().join("requirements-txt");
         let requirements_txt = working_dir.join(path);

-        let actual = RequirementsTxt::parse(
-            requirements_txt,
-            &working_dir,
-            &BaseClientBuilder::default(),
-        )
+        let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
         .await
         .unwrap_err();

@@ -1674,11 +1690,7 @@ mod test {
         let working_dir = workspace_test_data_dir().join("requirements-txt");
         let requirements_txt = working_dir.join(path);

-        let actual = RequirementsTxt::parse(
-            requirements_txt,
-            &working_dir,
-            &BaseClientBuilder::default(),
-        )
+        let actual = RequirementsTxt::parse(requirements_txt, &working_dir)
         .await
         .unwrap();

@@ -1700,11 +1712,7 @@ mod test {
            -r missing.txt
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error)

@@ -1745,11 +1753,7 @@ mod test {
            numpy[ö]==1.29
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1778,11 +1782,7 @@ mod test {
            numpy @ https:///
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1811,11 +1811,7 @@ mod test {
            -e https://localhost:8080/
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1844,11 +1840,7 @@ mod test {
            -e https://files.pythonhosted.org/packages/f7/69/96766da2cdb5605e6a31ef2734aff0be17901cefb385b885c2ab88896d76/ruff-0.5.6.tar.gz
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1875,11 +1867,7 @@ mod test {
            -e black[,abcdef]
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1908,11 +1896,7 @@ mod test {
            --index-url 123
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1939,11 +1923,7 @@ mod test {
            --index-url https:////
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -1971,11 +1951,7 @@ mod test {
            --no-binary
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -2004,11 +1980,7 @@ mod test {
            file.txt
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -2047,11 +2019,7 @@ mod test {
            -r subdir/child.txt
        "})?;

-        let requirements = RequirementsTxt::parse(
-            parent_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let requirements = RequirementsTxt::parse(parent_txt.path(), temp_dir.path())
         .await
         .unwrap();

@@ -2111,11 +2079,7 @@ mod test {
            --no-binary flask
        "})?;

-        let requirements = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap();

@@ -2186,11 +2150,7 @@ mod test {
            --no-index
        "})?;

-        let requirements = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap();

@@ -2288,11 +2248,7 @@ mod test {
            --index-url https://fake.pypi.org/simple
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -2336,11 +2292,7 @@ mod test {
            httpx # comment
        "})?;

-        let requirements = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap();

@@ -2551,11 +2503,7 @@ mod test {
            importlib_metadata-8.2.0+local-py3-none-any.whl[extra]
        "})?;

-        let requirements = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap();

@@ -2893,11 +2841,7 @@ mod test {
            tqdm
        "})?;

-        let error = RequirementsTxt::parse(
-            requirements_txt.path(),
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path())
         .await
         .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");

@@ -2995,12 +2939,7 @@ mod test {
            -c constraints-only-recursive.txt
        "})?;

-        let parsed = RequirementsTxt::parse(
-            &requirements,
-            temp_dir.path(),
-            &BaseClientBuilder::default(),
-        )
-        .await?;
+        let parsed = RequirementsTxt::parse(&requirements, temp_dir.path()).await?;

         let requirements: BTreeSet<String> = parsed
             .requirements
@@ -27,6 +27,9 @@ pub enum RequirementsSource {
     SetupCfg(PathBuf),
     /// Dependencies were provided via an unsupported Conda `environment.yml` file (e.g., `pip install -r environment.yml`).
    EnvironmentYml(PathBuf),
+    /// An extensionless file that could be either a PEP 723 script or a requirements.txt file.
+    /// We detect the format when reading the file.
+    Extensionless(PathBuf),
 }

 impl RequirementsSource {

@@ -50,7 +53,6 @@ impl RequirementsSource {
             .extension()
             .is_some_and(|ext| ext.eq_ignore_ascii_case("py") || ext.eq_ignore_ascii_case("pyw"))
         {
-            // TODO(blueraft): Support scripts without an extension.
             Ok(Self::Pep723Script(path))
         } else if path
             .extension()

@@ -60,6 +62,17 @@ impl RequirementsSource {
                 "`{}` is not a valid PEP 751 filename: expected TOML file to start with `pylock.` and end with `.toml` (e.g., `pylock.toml`, `pylock.dev.toml`)",
                 path.user_display(),
             ))
+        } else if path
+            .extension()
+            .is_some_and(|ext| ext.eq_ignore_ascii_case("txt") || ext.eq_ignore_ascii_case("in"))
+            || path.starts_with("http://")
+            || path.starts_with("https://")
+        {
+            Ok(Self::RequirementsTxt(path))
+        } else if path.extension().is_none() {
+            // If we don't have an extension, mark it as extensionless so we can detect
+            // the format later (either a PEP 723 script or a requirements.txt file).
+            Ok(Self::Extensionless(path))
         } else {
             Ok(Self::RequirementsTxt(path))
         }

@@ -302,7 +315,8 @@ impl std::fmt::Display for RequirementsSource {
             | Self::PyprojectToml(path)
             | Self::SetupPy(path)
             | Self::SetupCfg(path)
-            | Self::EnvironmentYml(path) => {
+            | Self::EnvironmentYml(path)
+            | Self::Extensionless(path) => {
                 write!(f, "{}", path.simplified_display())
             }
         }
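Note (not part of the diff): condensed, the classification order these hunks establish is `.py`/`.pyw` → PEP 723 script, `pylock.*.toml` → validated separately, `.txt`/`.in` or an `http(s)://` URL → requirements file, no extension at all → the new `Extensionless` variant, anything else → requirements file. A simplified stand-alone sketch, where `Kind` is a stand-in for `RequirementsSource` and the `pylock`/`setup.py`/`pyproject.toml` branches are elided:

    use std::path::Path;

    enum Kind {
        Pep723Script,
        RequirementsTxt,
        Extensionless,
    }

    fn classify(path: &Path) -> Kind {
        let ext = path.extension().and_then(|ext| ext.to_str());
        let is_url = path.starts_with("http://") || path.starts_with("https://");
        match ext {
            Some(e) if e.eq_ignore_ascii_case("py") || e.eq_ignore_ascii_case("pyw") => {
                Kind::Pep723Script
            }
            Some(e) if e.eq_ignore_ascii_case("txt") || e.eq_ignore_ascii_case("in") => {
                Kind::RequirementsTxt
            }
            // URLs are treated as requirements files regardless of extension.
            _ if is_url => Kind::RequirementsTxt,
            // No extension: defer the script-vs-requirements decision until the file is read.
            None => Kind::Extensionless,
            _ => Kind::RequirementsTxt,
        }
    }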
@@ -46,7 +46,7 @@ use uv_fs::{CWD, Simplified};
 use uv_normalize::{ExtraName, PackageName, PipGroupName};
 use uv_pypi_types::PyProjectToml;
 use uv_requirements_txt::{RequirementsTxt, RequirementsTxtRequirement, SourceCache};
-use uv_scripts::{Pep723Error, Pep723Item, Pep723Script};
+use uv_scripts::{Pep723Error, Pep723Metadata, Pep723Script};
 use uv_warnings::warn_user;

 use crate::{RequirementsSource, SourceTree};

@@ -95,126 +95,8 @@ impl RequirementsSpecification {
         Self::from_source_with_cache(source, client_builder, &mut SourceCache::default()).await
     }

-    /// Read the requirements and constraints from a source, using a cache for file contents.
-    #[instrument(skip_all, level = tracing::Level::DEBUG, fields(source = % source))]
-    pub async fn from_source_with_cache(
-        source: &RequirementsSource,
-        client_builder: &BaseClientBuilder<'_>,
-        cache: &mut SourceCache,
-    ) -> Result<Self> {
-        Ok(match source {
-            RequirementsSource::Package(requirement) => Self {
-                requirements: vec![UnresolvedRequirementSpecification::from(
-                    requirement.clone(),
-                )],
-                ..Self::default()
-            },
-            RequirementsSource::Editable(requirement) => Self {
-                requirements: vec![UnresolvedRequirementSpecification::from(
-                    requirement.clone().into_editable()?,
-                )],
-                ..Self::default()
-            },
-            RequirementsSource::RequirementsTxt(path) => {
-                if !(path == Path::new("-")
-                    || path.starts_with("http://")
-                    || path.starts_with("https://")
-                    || path.exists())
-                {
-                    return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
-                }
-
-                let requirements_txt =
-                    RequirementsTxt::parse_with_cache(path, &*CWD, client_builder, cache).await?;
-
-                if requirements_txt == RequirementsTxt::default() {
-                    if path == Path::new("-") {
-                        warn_user!("No dependencies found in stdin");
-                    } else {
-                        warn_user!(
-                            "Requirements file `{}` does not contain any dependencies",
-                            path.user_display()
-                        );
-                    }
-                }
-
-                Self {
-                    requirements: requirements_txt
-                        .requirements
-                        .into_iter()
-                        .map(UnresolvedRequirementSpecification::from)
-                        .chain(
-                            requirements_txt
-                                .editables
-                                .into_iter()
-                                .map(UnresolvedRequirementSpecification::from),
-                        )
-                        .collect(),
-                    constraints: requirements_txt
-                        .constraints
-                        .into_iter()
-                        .map(Requirement::from)
-                        .map(NameRequirementSpecification::from)
-                        .collect(),
-                    index_url: requirements_txt.index_url.map(IndexUrl::from),
-                    extra_index_urls: requirements_txt
-                        .extra_index_urls
-                        .into_iter()
-                        .map(IndexUrl::from)
-                        .collect(),
-                    no_index: requirements_txt.no_index,
-                    find_links: requirements_txt
-                        .find_links
-                        .into_iter()
-                        .map(IndexUrl::from)
-                        .collect(),
-                    no_binary: requirements_txt.no_binary,
-                    no_build: requirements_txt.only_binary,
-                    ..Self::default()
-                }
-            }
-            RequirementsSource::PyprojectToml(path) => {
-                let contents = match fs_err::tokio::read_to_string(&path).await {
-                    Ok(contents) => contents,
-                    Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                        return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
-                    }
-                    Err(err) => {
-                        return Err(anyhow::anyhow!(
-                            "Failed to read `{}`: {}",
-                            path.user_display(),
-                            err
-                        ));
-                    }
-                };
-                let pyproject_toml = toml::from_str::<PyProjectToml>(&contents)
-                    .with_context(|| format!("Failed to parse: `{}`", path.user_display()))?;
-
-                Self {
-                    source_trees: vec![SourceTree::PyProjectToml(path.clone(), pyproject_toml)],
-                    ..Self::default()
-                }
-            }
-            RequirementsSource::Pep723Script(path) => {
-                let script = match Pep723Script::read(&path).await {
-                    Ok(Some(script)) => Pep723Item::Script(script),
-                    Ok(None) => {
-                        return Err(anyhow::anyhow!(
-                            "`{}` does not contain inline script metadata",
-                            path.user_display(),
-                        ));
-                    }
-                    Err(Pep723Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
-                        return Err(anyhow::anyhow!(
-                            "Failed to read `{}` (not found)",
-                            path.user_display(),
-                        ));
-                    }
-                    Err(err) => return Err(err.into()),
-                };
-
-                let metadata = script.metadata();
-
+    /// Create a [`RequirementsSpecification`] from PEP 723 script metadata.
+    fn from_pep723_metadata(metadata: &Pep723Metadata) -> Self {
         let requirements = metadata
             .dependencies
             .as_ref()

@@ -275,9 +157,7 @@ impl RequirementsSpecification {
                 .extra_index_url
                 .as_ref()
                 .into_iter()
-                .flat_map(|urls| {
-                    urls.iter().map(|index| Index::from(index.clone()).url)
-                })
+                .flat_map(|urls| urls.iter().map(|index| Index::from(index.clone()).url))
                 .collect(),
             no_index: tool_uv.top_level.no_index.unwrap_or_default(),
             find_links: tool_uv

@@ -285,9 +165,7 @@ impl RequirementsSpecification {
                 .find_links
                 .as_ref()
                 .into_iter()
-                .flat_map(|urls| {
-                    urls.iter().map(|index| Index::from(index.clone()).url)
-                })
+                .flat_map(|urls| urls.iter().map(|index| Index::from(index.clone()).url))
                 .collect(),
             no_binary: NoBinary::from_args(
                 tool_uv.top_level.no_binary,

@@ -314,6 +192,124 @@ impl RequirementsSpecification {
             }
         }
     }
+
+    /// Create a [`RequirementsSpecification`] from a parsed `requirements.txt` file.
+    fn from_requirements_txt(requirements_txt: RequirementsTxt) -> Self {
+        Self {
+            requirements: requirements_txt
+                .requirements
+                .into_iter()
+                .map(UnresolvedRequirementSpecification::from)
+                .chain(
+                    requirements_txt
+                        .editables
+                        .into_iter()
+                        .map(UnresolvedRequirementSpecification::from),
+                )
+                .collect(),
+            constraints: requirements_txt
+                .constraints
+                .into_iter()
+                .map(Requirement::from)
+                .map(NameRequirementSpecification::from)
+                .collect(),
+            index_url: requirements_txt.index_url.map(IndexUrl::from),
+            extra_index_urls: requirements_txt
+                .extra_index_urls
+                .into_iter()
+                .map(IndexUrl::from)
+                .collect(),
+            no_index: requirements_txt.no_index,
+            find_links: requirements_txt
+                .find_links
+                .into_iter()
+                .map(IndexUrl::from)
+                .collect(),
+            no_binary: requirements_txt.no_binary,
+            no_build: requirements_txt.only_binary,
+            ..Self::default()
+        }
+    }
+
+    /// Read the requirements and constraints from a source, using a cache for file contents.
+    #[instrument(skip_all, level = tracing::Level::DEBUG, fields(source = % source))]
+    pub async fn from_source_with_cache(
+        source: &RequirementsSource,
+        client_builder: &BaseClientBuilder<'_>,
+        cache: &mut SourceCache,
+    ) -> Result<Self> {
+        Ok(match source {
+            RequirementsSource::Package(requirement) => Self {
+                requirements: vec![UnresolvedRequirementSpecification::from(
+                    requirement.clone(),
+                )],
+                ..Self::default()
+            },
+            RequirementsSource::Editable(requirement) => Self {
+                requirements: vec![UnresolvedRequirementSpecification::from(
+                    requirement.clone().into_editable()?,
+                )],
+                ..Self::default()
+            },
+            RequirementsSource::RequirementsTxt(path) => {
+                if !(path.starts_with("http://") || path.starts_with("https://") || path.exists()) {
+                    return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
+                }
+
+                let requirements_txt =
+                    RequirementsTxt::parse_with_cache(path, &*CWD, client_builder, cache).await?;
+
+                if requirements_txt == RequirementsTxt::default() {
+                    warn_user!(
+                        "Requirements file `{}` does not contain any dependencies",
+                        path.user_display()
+                    );
+                }
+
+                Self::from_requirements_txt(requirements_txt)
+            }
+            RequirementsSource::PyprojectToml(path) => {
+                let contents = match fs_err::tokio::read_to_string(&path).await {
+                    Ok(contents) => contents,
+                    Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+                        return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
+                    }
+                    Err(err) => {
+                        return Err(anyhow::anyhow!(
+                            "Failed to read `{}`: {}",
+                            path.user_display(),
+                            err
+                        ));
+                    }
+                };
+                let pyproject_toml = toml::from_str::<PyProjectToml>(&contents)
+                    .with_context(|| format!("Failed to parse: `{}`", path.user_display()))?;
+
+                Self {
+                    source_trees: vec![SourceTree::PyProjectToml(path.clone(), pyproject_toml)],
+                    ..Self::default()
+                }
+            }
+            RequirementsSource::Pep723Script(path) => {
+                let script = match Pep723Script::read(&path).await {
+                    Ok(Some(script)) => script,
+                    Ok(None) => {
+                        return Err(anyhow::anyhow!(
+                            "`{}` does not contain inline script metadata",
+                            path.user_display(),
+                        ));
+                    }
+                    Err(Pep723Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
+                        return Err(anyhow::anyhow!(
+                            "Failed to read `{}` (not found)",
+                            path.user_display(),
+                        ));
+                    }
+                    Err(err) => return Err(err.into()),
+                };
+
+                Self::from_pep723_metadata(&script.metadata)
+            }
             RequirementsSource::SetupPy(path) => {
                 if !path.is_file() {
                     return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));

@@ -350,6 +346,39 @@ impl RequirementsSpecification {
                     path.user_display()
                 ));
             }
+            RequirementsSource::Extensionless(path) => {
+                // Read the file content.
+                let content = if let Some(content) = cache.get(path.as_path()) {
+                    content.clone()
+                } else {
+                    let content = uv_fs::read_to_string_transcode(&path).await?;
+                    cache.insert(path.clone(), content.clone());
+                    content
+                };
+
+                // Detect if it's a PEP 723 script.
+                if let Some(metadata) = Pep723Metadata::parse(content.as_bytes())? {
+                    Self::from_pep723_metadata(&metadata)
+                } else {
+                    // If it's not a PEP 723 script, assume it's a `requirements.txt` file.
+                    let requirements_txt =
+                        RequirementsTxt::parse_str(&content, &path, &*CWD, client_builder, cache)
+                            .await?;
+
+                    if requirements_txt == RequirementsTxt::default() {
+                        if path == Path::new("-") {
+                            warn_user!("No dependencies found in stdin");
+                        } else {
+                            warn_user!(
+                                "Requirements file `{}` does not contain any dependencies",
+                                path.user_display()
+                            );
+                        }
+                    }
+
+                    Self::from_requirements_txt(requirements_txt)
+                }
+            }
         })
     }
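Note (not part of the diff): the `Extensionless` arm added above reads the file once through the shared `SourceCache`, tries to interpret the content as a PEP 723 script, and only then falls back to the `requirements.txt` parser via the new `parse_str`. A simplified sketch of that detection order (illustrative only; `Detected` and `detect` are not real uv items, and error handling plus the `&*CWD` working directory are simplified):

    use std::path::Path;
    use uv_client::BaseClientBuilder;
    use uv_requirements_txt::{RequirementsTxt, SourceCache};
    use uv_scripts::Pep723Metadata;

    enum Detected {
        Script(Pep723Metadata),
        Requirements(RequirementsTxt),
    }

    async fn detect(
        path: &Path,
        content: &str,
        client_builder: &BaseClientBuilder<'_>,
        cache: &mut SourceCache,
    ) -> anyhow::Result<Detected> {
        // 1. Prefer PEP 723 inline metadata if the content carries a script block.
        if let Some(metadata) = Pep723Metadata::parse(content.as_bytes())? {
            return Ok(Detected::Script(metadata));
        }
        // 2. Otherwise treat the content as a requirements.txt file.
        let requirements =
            RequirementsTxt::parse_str(content, path, Path::new("."), client_builder, cache)
                .await?;
        Ok(Detected::Requirements(requirements))
    }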
@@ -2,7 +2,6 @@ use std::path::Path;

 use anyhow::Result;

-use uv_client::{BaseClientBuilder, Connectivity};
 use uv_configuration::Upgrade;
 use uv_fs::CWD;
 use uv_git::ResolvedRepositoryReference;

@@ -38,13 +37,7 @@ pub async fn read_requirements_txt(
    }

    // Parse the requirements from the lockfile.
-    let requirements_txt = RequirementsTxt::parse(
-        output_file,
-        &*CWD,
-        // Pseudo-client for reading local-only requirements.
-        &BaseClientBuilder::default().connectivity(Connectivity::Offline),
-    )
-    .await?;
+    let requirements_txt = RequirementsTxt::parse(output_file, &*CWD).await?;

    // Map each entry in the lockfile to a preference.
    let preferences = requirements_txt
@@ -138,6 +138,7 @@ pub(crate) async fn add(
            RequirementsSource::Package(_)
            | RequirementsSource::Editable(_)
            | RequirementsSource::RequirementsTxt(_)
+            | RequirementsSource::Extensionless(_)
            | RequirementsSource::EnvironmentYml(_) => {}
        }
    }
@@ -140,7 +140,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
            RequirementsSource::SetupCfg(_) => {
                bail!("Adding requirements from a `setup.cfg` is not supported in `uv run`");
            }
-            RequirementsSource::RequirementsTxt(path) => {
+            RequirementsSource::Extensionless(path) => {
                if path == Path::new("-") {
                    requirements_from_stdin = true;
                }
@@ -7096,6 +7096,62 @@ fn add_script_without_metadata_table_with_docstring() -> Result<()> {
    Ok(())
 }

+/// Add to a script without a `.py` extension.
+#[test]
+fn add_extensionless_script() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let script = context.temp_dir.child("script");
+    script.write_str(indoc! {r#"
+        #!/usr/bin/env python3
+        # /// script
+        # requires-python = ">=3.12"
+        # dependencies = []
+        # ///
+        import requests
+        from rich.pretty import pprint
+
+        resp = requests.get("https://peps.python.org/api/peps.json")
+        data = resp.json()
+        pprint([(k, v["title"]) for k, v in data.items()][:10])
+    "#})?;
+
+    uv_snapshot!(context.filters(), context.add().args(["rich", "requests<3"]).arg("--script").arg("script"), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Updated `script`
+    "###);
+
+    let script_content = context.read("script");
+
+    insta::with_settings!({
+        filters => context.filters(),
+    }, {
+        assert_snapshot!(
+            script_content, @r###"
+        #!/usr/bin/env python3
+        # /// script
+        # requires-python = ">=3.12"
+        # dependencies = [
+        # "requests<3",
+        # "rich",
+        # ]
+        # ///
+        import requests
+        from rich.pretty import pprint
+
+        resp = requests.get("https://peps.python.org/api/peps.json")
+        data = resp.json()
+        pprint([(k, v["title"]) for k, v in data.items()][:10])
+        "###
+        );
+    });
+    Ok(())
+}
+
 /// Remove a dependency that is present in multiple places.
 #[test]
 fn remove_repeated() -> Result<()> {
@@ -2646,8 +2646,7 @@ fn tool_run_with_incompatible_build_constraints() -> Result<()> {
 fn tool_run_with_dependencies_from_script() -> Result<()> {
    let context = TestContext::new("3.12").with_filtered_counts();

-    let script = context.temp_dir.child("script.py");
-    script.write_str(indoc! {r#"
+    let script_contents = indoc! {r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [

@@ -2656,7 +2655,13 @@ fn tool_run_with_dependencies_from_script() -> Result<()> {
        # ///

        import anyio
-    "#})?;
+    "#};
+
+    let script = context.temp_dir.child("script.py");
+    script.write_str(script_contents)?;
+
+    let script_without_extension = context.temp_dir.child("script-no-ext");
+    script_without_extension.write_str(script_contents)?;

    // script dependencies (anyio) are now installed.
    uv_snapshot!(context.filters(), context.tool_run()

@@ -2684,6 +2689,20 @@ fn tool_run_with_dependencies_from_script() -> Result<()> {
    + sniffio==1.3.1
    ");

+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("--with-requirements")
+        .arg("script-no-ext")
+        .arg("black")
+        .arg("script-no-ext")
+        .arg("-q"), @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved [N] packages in [TIME]
+    ");
+
    // Error when the script is not a valid PEP723 script.
    let script = context.temp_dir.child("not_pep723_script.py");
    script.write_str("import anyio")?;