mirror of https://github.com/astral-sh/uv
## Summary
Allow using http(s) urls for constraints and requirements files handed
to the CLI, by handling paths starting with `http://` or `https://`
differently. This allows commands such as: `uv pip install -c
https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.8.txt
requests`.
closes #1332
## Test Plan
Testing install using a `constraints.txt` file hosted on GitHub in the
Airflow repository:
fbdc2eba8e/crates/uv/tests/pip_install.rs (L1440-L1484)
## Advice Needed
- filesystem/http dispatch is implemented at a relatively low level (at
`crates/uv-fs/src/lib.rs#read_to_string`). Should I change some naming
here so it is obvious that the function is able to dispatch?
- I kept the CLI argument for -c and -r as a PathBuf, even though now it
is technically either a path or a url. We could either keep this as is
for now, or implement a new enum for this case? The enum could then
handle dispatch to files/http.
- Using another abstraction layer like
https://docs.rs/object_store/latest/object_store/ for the
files/urls/[s3] dispatch could work as well, though I ran into a bug
during testing which I couldn't debug.
This commit is contained in:
parent
2ebcef9ad8
commit
71626e8dec
|
|
@ -203,6 +203,17 @@ dependencies = [
|
||||||
"tokio",
|
"tokio",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "async-recursion"
|
||||||
|
version = "1.0.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.48",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "async-trait"
|
name = "async-trait"
|
||||||
version = "0.1.77"
|
version = "0.1.77"
|
||||||
|
|
@ -2785,6 +2796,7 @@ version = "0.0.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"assert_fs",
|
"assert_fs",
|
||||||
|
"async-recursion",
|
||||||
"fs-err",
|
"fs-err",
|
||||||
"indoc",
|
"indoc",
|
||||||
"insta",
|
"insta",
|
||||||
|
|
@ -2793,14 +2805,19 @@ dependencies = [
|
||||||
"pep440_rs",
|
"pep440_rs",
|
||||||
"pep508_rs",
|
"pep508_rs",
|
||||||
"regex",
|
"regex",
|
||||||
|
"reqwest",
|
||||||
|
"reqwest-middleware",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
"test-case",
|
"test-case",
|
||||||
"thiserror",
|
"thiserror",
|
||||||
|
"tokio",
|
||||||
"tracing",
|
"tracing",
|
||||||
"unscanny",
|
"unscanny",
|
||||||
"url",
|
"url",
|
||||||
|
"uv-cache",
|
||||||
|
"uv-client",
|
||||||
"uv-fs",
|
"uv-fs",
|
||||||
"uv-normalize",
|
"uv-normalize",
|
||||||
"uv-warnings",
|
"uv-warnings",
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ members = ["crates/*"]
|
||||||
exclude = [
|
exclude = [
|
||||||
"scripts",
|
"scripts",
|
||||||
# Needs nightly
|
# Needs nightly
|
||||||
"crates/uv-trampoline"
|
"crates/uv-trampoline",
|
||||||
]
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
|
|
@ -22,6 +22,7 @@ anyhow = { version = "1.0.80" }
|
||||||
async-compression = { version = "0.4.6" }
|
async-compression = { version = "0.4.6" }
|
||||||
async-channel = { version = "2.2.0" }
|
async-channel = { version = "2.2.0" }
|
||||||
async-trait = { version = "0.1.77" }
|
async-trait = { version = "0.1.77" }
|
||||||
|
async-recursion = { version = "1.0.5" }
|
||||||
async_http_range_reader = { version = "0.7.0" }
|
async_http_range_reader = { version = "0.7.0" }
|
||||||
async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "d76801da0943de985254fc6255c0e476b57c5836", features = ["deflate"] }
|
async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "d76801da0943de985254fc6255c0e476b57c5836", features = ["deflate"] }
|
||||||
base64 = { version = "0.21.7" }
|
base64 = { version = "0.21.7" }
|
||||||
|
|
|
||||||
|
|
@ -15,15 +15,21 @@ workspace = true
|
||||||
[dependencies]
|
[dependencies]
|
||||||
pep440_rs = { path = "../pep440-rs", features = ["rkyv", "serde"] }
|
pep440_rs = { path = "../pep440-rs", features = ["rkyv", "serde"] }
|
||||||
pep508_rs = { path = "../pep508-rs", features = ["rkyv", "serde", "non-pep508-extensions"] }
|
pep508_rs = { path = "../pep508-rs", features = ["rkyv", "serde", "non-pep508-extensions"] }
|
||||||
|
uv-cache = { path = "../uv-cache" }
|
||||||
|
uv-client = { path = "../uv-client" }
|
||||||
uv-fs = { path = "../uv-fs" }
|
uv-fs = { path = "../uv-fs" }
|
||||||
uv-normalize = { path = "../uv-normalize" }
|
uv-normalize = { path = "../uv-normalize" }
|
||||||
uv-warnings = { path = "../uv-warnings" }
|
uv-warnings = { path = "../uv-warnings" }
|
||||||
|
|
||||||
|
async-recursion = { workspace = true }
|
||||||
fs-err = { workspace = true }
|
fs-err = { workspace = true }
|
||||||
once_cell = { workspace = true }
|
once_cell = { workspace = true }
|
||||||
regex = { workspace = true }
|
regex = { workspace = true }
|
||||||
|
reqwest = { workspace = true }
|
||||||
|
reqwest-middleware = { workspace = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
thiserror = { workspace = true }
|
thiserror = { workspace = true }
|
||||||
|
tokio = { workspace = true }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
unscanny = { workspace = true }
|
unscanny = { workspace = true }
|
||||||
url = { workspace = true }
|
url = { workspace = true }
|
||||||
|
|
|
||||||
|
|
@ -40,11 +40,13 @@ use std::io;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tracing::{instrument, warn};
|
use tracing::instrument;
|
||||||
use unscanny::{Pattern, Scanner};
|
use unscanny::{Pattern, Scanner};
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
use uv_client::RegistryClient;
|
||||||
use uv_warnings::warn_user;
|
use uv_warnings::warn_user;
|
||||||
|
|
||||||
|
use async_recursion::async_recursion;
|
||||||
use pep508_rs::{
|
use pep508_rs::{
|
||||||
expand_path_vars, split_scheme, Extras, Pep508Error, Pep508ErrorSource, Requirement, Scheme,
|
expand_path_vars, split_scheme, Extras, Pep508Error, Pep508ErrorSource, Requirement, Scheme,
|
||||||
VerbatimUrl,
|
VerbatimUrl,
|
||||||
|
|
@ -321,28 +323,38 @@ pub struct RequirementsTxt {
|
||||||
impl RequirementsTxt {
|
impl RequirementsTxt {
|
||||||
/// See module level documentation
|
/// See module level documentation
|
||||||
#[instrument(skip_all, fields(requirements_txt = requirements_txt.as_ref().as_os_str().to_str()))]
|
#[instrument(skip_all, fields(requirements_txt = requirements_txt.as_ref().as_os_str().to_str()))]
|
||||||
pub fn parse(
|
pub async fn parse(
|
||||||
requirements_txt: impl AsRef<Path>,
|
requirements_txt: impl AsRef<Path>,
|
||||||
working_dir: impl AsRef<Path>,
|
working_dir: impl AsRef<Path>,
|
||||||
|
client: &RegistryClient,
|
||||||
) -> Result<Self, RequirementsTxtFileError> {
|
) -> Result<Self, RequirementsTxtFileError> {
|
||||||
|
let requirements_txt = requirements_txt.as_ref();
|
||||||
|
let working_dir = working_dir.as_ref();
|
||||||
|
|
||||||
let content =
|
let content =
|
||||||
uv_fs::read_to_string(&requirements_txt).map_err(|err| RequirementsTxtFileError {
|
if requirements_txt.starts_with("http://") | requirements_txt.starts_with("https://") {
|
||||||
file: requirements_txt.as_ref().to_path_buf(),
|
read_url_to_string(&requirements_txt, client).await
|
||||||
error: RequirementsTxtParserError::IO(err),
|
} else {
|
||||||
|
uv_fs::read_to_string(&requirements_txt)
|
||||||
|
.await
|
||||||
|
.map_err(RequirementsTxtParserError::IO)
|
||||||
|
}
|
||||||
|
.map_err(|err| RequirementsTxtFileError {
|
||||||
|
file: requirements_txt.to_path_buf(),
|
||||||
|
error: err,
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let working_dir = working_dir.as_ref();
|
let requirements_dir = requirements_txt.parent().unwrap_or(working_dir);
|
||||||
let requirements_dir = requirements_txt.as_ref().parent().unwrap_or(working_dir);
|
let data = Self::parse_inner(&content, working_dir, requirements_dir, client)
|
||||||
let data = Self::parse_inner(&content, working_dir, requirements_dir).map_err(|err| {
|
.await
|
||||||
RequirementsTxtFileError {
|
.map_err(|err| RequirementsTxtFileError {
|
||||||
file: requirements_txt.as_ref().to_path_buf(),
|
file: requirements_txt.to_path_buf(),
|
||||||
error: err,
|
error: err,
|
||||||
}
|
})?;
|
||||||
})?;
|
|
||||||
if data == Self::default() {
|
if data == Self::default() {
|
||||||
warn_user!(
|
warn_user!(
|
||||||
"Requirements file {} does not contain any dependencies",
|
"Requirements file {} does not contain any dependencies",
|
||||||
requirements_txt.as_ref().display()
|
requirements_txt.simplified_display()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -355,10 +367,12 @@ impl RequirementsTxt {
|
||||||
/// the current working directory. However, relative paths to sub-files (e.g., `-r ../requirements.txt`)
|
/// the current working directory. However, relative paths to sub-files (e.g., `-r ../requirements.txt`)
|
||||||
/// are resolved against the directory of the containing `requirements.txt` file, to match
|
/// are resolved against the directory of the containing `requirements.txt` file, to match
|
||||||
/// `pip`'s behavior.
|
/// `pip`'s behavior.
|
||||||
pub fn parse_inner(
|
#[async_recursion]
|
||||||
|
pub async fn parse_inner(
|
||||||
content: &str,
|
content: &str,
|
||||||
working_dir: &Path,
|
working_dir: &Path,
|
||||||
requirements_dir: &Path,
|
requirements_dir: &Path,
|
||||||
|
client: &RegistryClient,
|
||||||
) -> Result<Self, RequirementsTxtParserError> {
|
) -> Result<Self, RequirementsTxtParserError> {
|
||||||
let mut s = Scanner::new(content);
|
let mut s = Scanner::new(content);
|
||||||
|
|
||||||
|
|
@ -370,14 +384,20 @@ impl RequirementsTxt {
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
} => {
|
} => {
|
||||||
let sub_file = requirements_dir.join(expand_path_vars(&filename).as_ref());
|
let filename = expand_path_vars(&filename);
|
||||||
let sub_requirements = Self::parse(&sub_file, working_dir).map_err(|err| {
|
let sub_file =
|
||||||
RequirementsTxtParserError::Subfile {
|
if filename.starts_with("http://") || filename.starts_with("https://") {
|
||||||
|
PathBuf::from(filename.as_ref())
|
||||||
|
} else {
|
||||||
|
requirements_dir.join(filename.as_ref())
|
||||||
|
};
|
||||||
|
let sub_requirements = Self::parse(&sub_file, working_dir, client)
|
||||||
|
.await
|
||||||
|
.map_err(|err| RequirementsTxtParserError::Subfile {
|
||||||
source: Box::new(err),
|
source: Box::new(err),
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
}
|
})?;
|
||||||
})?;
|
|
||||||
|
|
||||||
// Disallow conflicting `--index-url` in nested `requirements` files.
|
// Disallow conflicting `--index-url` in nested `requirements` files.
|
||||||
if sub_requirements.index_url.is_some()
|
if sub_requirements.index_url.is_some()
|
||||||
|
|
@ -402,14 +422,20 @@ impl RequirementsTxt {
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
} => {
|
} => {
|
||||||
let sub_file = requirements_dir.join(expand_path_vars(&filename).as_ref());
|
let filename = expand_path_vars(&filename);
|
||||||
let sub_constraints = Self::parse(&sub_file, working_dir).map_err(|err| {
|
let sub_file =
|
||||||
RequirementsTxtParserError::Subfile {
|
if filename.starts_with("http://") || filename.starts_with("https://") {
|
||||||
|
PathBuf::from(filename.as_ref())
|
||||||
|
} else {
|
||||||
|
requirements_dir.join(filename.as_ref())
|
||||||
|
};
|
||||||
|
let sub_constraints = Self::parse(&sub_file, working_dir, client)
|
||||||
|
.await
|
||||||
|
.map_err(|err| RequirementsTxtParserError::Subfile {
|
||||||
source: Box::new(err),
|
source: Box::new(err),
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
}
|
})?;
|
||||||
})?;
|
|
||||||
// Treat any nested requirements or constraints as constraints. This differs
|
// Treat any nested requirements or constraints as constraints. This differs
|
||||||
// from `pip`, which seems to treat `-r` requirements in constraints files as
|
// from `pip`, which seems to treat `-r` requirements in constraints files as
|
||||||
// _requirements_, but we don't want to support that.
|
// _requirements_, but we don't want to support that.
|
||||||
|
|
@ -767,6 +793,31 @@ fn parse_value<'a, T>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Fetch the contents of a URL and return them as a string.
|
||||||
|
async fn read_url_to_string(
|
||||||
|
path: impl AsRef<Path>,
|
||||||
|
client: &RegistryClient,
|
||||||
|
) -> Result<String, RequirementsTxtParserError> {
|
||||||
|
// pip would URL-encode the non-UTF-8 bytes of the string; we just don't support them.
|
||||||
|
let path_utf8 =
|
||||||
|
path.as_ref()
|
||||||
|
.to_str()
|
||||||
|
.ok_or_else(|| RequirementsTxtParserError::NonUnicodeUrl {
|
||||||
|
url: path.as_ref().to_owned(),
|
||||||
|
})?;
|
||||||
|
Ok(client
|
||||||
|
.cached_client()
|
||||||
|
.uncached()
|
||||||
|
.get(path_utf8)
|
||||||
|
.send()
|
||||||
|
.await?
|
||||||
|
.error_for_status()
|
||||||
|
.map_err(reqwest_middleware::Error::Reqwest)?
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.map_err(reqwest_middleware::Error::Reqwest)?)
|
||||||
|
}
|
||||||
|
|
||||||
/// Error parsing requirements.txt, wrapper with filename
|
/// Error parsing requirements.txt, wrapper with filename
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct RequirementsTxtFileError {
|
pub struct RequirementsTxtFileError {
|
||||||
|
|
@ -808,6 +859,10 @@ pub enum RequirementsTxtParserError {
|
||||||
start: usize,
|
start: usize,
|
||||||
end: usize,
|
end: usize,
|
||||||
},
|
},
|
||||||
|
Reqwest(reqwest_middleware::Error),
|
||||||
|
NonUnicodeUrl {
|
||||||
|
url: PathBuf,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RequirementsTxtParserError {
|
impl RequirementsTxtParserError {
|
||||||
|
|
@ -855,6 +910,8 @@ impl RequirementsTxtParserError {
|
||||||
start: start + offset,
|
start: start + offset,
|
||||||
end: end + offset,
|
end: end + offset,
|
||||||
},
|
},
|
||||||
|
Self::Reqwest(err) => Self::Reqwest(err),
|
||||||
|
Self::NonUnicodeUrl { url } => Self::NonUnicodeUrl { url },
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -897,6 +954,16 @@ impl Display for RequirementsTxtParserError {
|
||||||
Self::Subfile { start, .. } => {
|
Self::Subfile { start, .. } => {
|
||||||
write!(f, "Error parsing included file at position {start}")
|
write!(f, "Error parsing included file at position {start}")
|
||||||
}
|
}
|
||||||
|
Self::Reqwest(err) => {
|
||||||
|
write!(f, "Error while accessing remote requirements file {err}")
|
||||||
|
}
|
||||||
|
Self::NonUnicodeUrl { url } => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"Remote requirements URL contains non-unicode characters: {}",
|
||||||
|
url.display(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -914,6 +981,8 @@ impl std::error::Error for RequirementsTxtParserError {
|
||||||
Self::Pep508 { source, .. } => Some(source),
|
Self::Pep508 { source, .. } => Some(source),
|
||||||
Self::Subfile { source, .. } => Some(source.as_ref()),
|
Self::Subfile { source, .. } => Some(source.as_ref()),
|
||||||
Self::Parser { .. } => None,
|
Self::Parser { .. } => None,
|
||||||
|
Self::Reqwest(err) => err.source(),
|
||||||
|
Self::NonUnicodeUrl { .. } => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -989,6 +1058,21 @@ impl Display for RequirementsTxtFileError {
|
||||||
self.file.simplified_display(),
|
self.file.simplified_display(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
RequirementsTxtParserError::Reqwest(err) => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"Error while accessing remote requirements file {}: {err}",
|
||||||
|
self.file.simplified_display(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
RequirementsTxtParserError::NonUnicodeUrl { url } => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"Remote requirements URL contains non-unicode characters: {}",
|
||||||
|
url.display(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1005,6 +1089,12 @@ impl From<io::Error> for RequirementsTxtParserError {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<reqwest_middleware::Error> for RequirementsTxtParserError {
|
||||||
|
fn from(err: reqwest_middleware::Error) -> Self {
|
||||||
|
Self::Reqwest(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Calculates the column and line offset of a given cursor based on the
|
/// Calculates the column and line offset of a given cursor based on the
|
||||||
/// number of Unicode codepoints.
|
/// number of Unicode codepoints.
|
||||||
fn calculate_row_column(content: &str, position: usize) -> (usize, usize) {
|
fn calculate_row_column(content: &str, position: usize) -> (usize, usize) {
|
||||||
|
|
@ -1057,6 +1147,7 @@ mod test {
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
use test_case::test_case;
|
use test_case::test_case;
|
||||||
use unscanny::Scanner;
|
use unscanny::Scanner;
|
||||||
|
use uv_client::{RegistryClient, RegistryClientBuilder};
|
||||||
use uv_fs::Simplified;
|
use uv_fs::Simplified;
|
||||||
|
|
||||||
use crate::{calculate_row_column, EditableRequirement, RequirementsTxt};
|
use crate::{calculate_row_column, EditableRequirement, RequirementsTxt};
|
||||||
|
|
@ -1065,6 +1156,12 @@ mod test {
|
||||||
PathBuf::from("./test-data")
|
PathBuf::from("./test-data")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn registry_client() -> RegistryClient {
|
||||||
|
RegistryClientBuilder::new(uv_cache::Cache::temp().unwrap())
|
||||||
|
.connectivity(uv_client::Connectivity::Online)
|
||||||
|
.build()
|
||||||
|
}
|
||||||
|
|
||||||
#[test_case(Path::new("basic.txt"))]
|
#[test_case(Path::new("basic.txt"))]
|
||||||
#[test_case(Path::new("constraints-a.txt"))]
|
#[test_case(Path::new("constraints-a.txt"))]
|
||||||
#[test_case(Path::new("constraints-b.txt"))]
|
#[test_case(Path::new("constraints-b.txt"))]
|
||||||
|
|
@ -1075,11 +1172,14 @@ mod test {
|
||||||
#[test_case(Path::new("poetry-with-hashes.txt"))]
|
#[test_case(Path::new("poetry-with-hashes.txt"))]
|
||||||
#[test_case(Path::new("small.txt"))]
|
#[test_case(Path::new("small.txt"))]
|
||||||
#[test_case(Path::new("whitespace.txt"))]
|
#[test_case(Path::new("whitespace.txt"))]
|
||||||
fn parse(path: &Path) {
|
#[tokio::test]
|
||||||
|
async fn parse(path: &Path) {
|
||||||
let working_dir = workspace_test_data_dir().join("requirements-txt");
|
let working_dir = workspace_test_data_dir().join("requirements-txt");
|
||||||
let requirements_txt = working_dir.join(path);
|
let requirements_txt = working_dir.join(path);
|
||||||
|
|
||||||
let actual = RequirementsTxt::parse(requirements_txt, &working_dir).unwrap();
|
let actual = RequirementsTxt::parse(requirements_txt, &working_dir, ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
let snapshot = format!("parse-{}", path.to_string_lossy());
|
let snapshot = format!("parse-{}", path.to_string_lossy());
|
||||||
insta::assert_debug_snapshot!(snapshot, actual);
|
insta::assert_debug_snapshot!(snapshot, actual);
|
||||||
|
|
@ -1096,7 +1196,8 @@ mod test {
|
||||||
#[test_case(Path::new("small.txt"))]
|
#[test_case(Path::new("small.txt"))]
|
||||||
#[test_case(Path::new("whitespace.txt"))]
|
#[test_case(Path::new("whitespace.txt"))]
|
||||||
#[test_case(Path::new("editable.txt"))]
|
#[test_case(Path::new("editable.txt"))]
|
||||||
fn line_endings(path: &Path) {
|
#[tokio::test]
|
||||||
|
async fn line_endings(path: &Path) {
|
||||||
let working_dir = workspace_test_data_dir().join("requirements-txt");
|
let working_dir = workspace_test_data_dir().join("requirements-txt");
|
||||||
let requirements_txt = working_dir.join(path);
|
let requirements_txt = working_dir.join(path);
|
||||||
|
|
||||||
|
|
@ -1120,14 +1221,16 @@ mod test {
|
||||||
let requirements_txt = temp_dir.path().join(path);
|
let requirements_txt = temp_dir.path().join(path);
|
||||||
fs::write(&requirements_txt, contents).unwrap();
|
fs::write(&requirements_txt, contents).unwrap();
|
||||||
|
|
||||||
let actual = RequirementsTxt::parse(&requirements_txt, &working_dir).unwrap();
|
let actual = RequirementsTxt::parse(&requirements_txt, &working_dir, ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
let snapshot = format!("line-endings-{}", path.to_string_lossy());
|
let snapshot = format!("line-endings-{}", path.to_string_lossy());
|
||||||
insta::assert_debug_snapshot!(snapshot, actual);
|
insta::assert_debug_snapshot!(snapshot, actual);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn invalid_include_missing_file() -> Result<()> {
|
async fn invalid_include_missing_file() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let missing_txt = temp_dir.child("missing.txt");
|
let missing_txt = temp_dir.child("missing.txt");
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
|
|
@ -1135,7 +1238,10 @@ mod test {
|
||||||
-r missing.txt
|
-r missing.txt
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error)
|
let errors = anyhow::Error::new(error)
|
||||||
.chain()
|
.chain()
|
||||||
// The last error is operating-system specific.
|
// The last error is operating-system specific.
|
||||||
|
|
@ -1155,22 +1261,25 @@ mod test {
|
||||||
}, {
|
}, {
|
||||||
insta::assert_snapshot!(errors, @r###"
|
insta::assert_snapshot!(errors, @r###"
|
||||||
Error parsing included file in `<REQUIREMENTS_TXT>` at position 0
|
Error parsing included file in `<REQUIREMENTS_TXT>` at position 0
|
||||||
failed to open file `<MISSING_TXT>`
|
failed to read from file `<MISSING_TXT>`
|
||||||
"###);
|
"###);
|
||||||
});
|
});
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn invalid_requirement() -> Result<()> {
|
async fn invalid_requirement() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str(indoc! {"
|
requirements_txt.write_str(indoc! {"
|
||||||
numpy[ö]==1.29
|
numpy[ö]==1.29
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1193,15 +1302,18 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn unsupported_editable() -> Result<()> {
|
async fn unsupported_editable() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str(indoc! {"
|
requirements_txt.write_str(indoc! {"
|
||||||
-e http://localhost:8080/
|
-e http://localhost:8080/
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1219,15 +1331,18 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn invalid_editable_extra() -> Result<()> {
|
async fn invalid_editable_extra() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str(indoc! {"
|
requirements_txt.write_str(indoc! {"
|
||||||
-e black[,abcdef]
|
-e black[,abcdef]
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1247,15 +1362,18 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn invalid_index_url() -> Result<()> {
|
async fn invalid_index_url() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str(indoc! {"
|
requirements_txt.write_str(indoc! {"
|
||||||
--index-url 123
|
--index-url 123
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1276,8 +1394,8 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn missing_r() -> Result<()> {
|
async fn missing_r() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
|
|
||||||
let file_txt = temp_dir.child("file.txt");
|
let file_txt = temp_dir.child("file.txt");
|
||||||
|
|
@ -1289,7 +1407,10 @@ mod test {
|
||||||
file.txt
|
file.txt
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1307,8 +1428,8 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn relative_requirement() -> Result<()> {
|
async fn relative_requirement() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
|
|
||||||
// Create a requirements file with a relative entry, in a subdirectory.
|
// Create a requirements file with a relative entry, in a subdirectory.
|
||||||
|
|
@ -1330,7 +1451,10 @@ mod test {
|
||||||
-r subdir/child.txt
|
-r subdir/child.txt
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let requirements = RequirementsTxt::parse(parent_txt.path(), temp_dir.path()).unwrap();
|
let requirements =
|
||||||
|
RequirementsTxt::parse(parent_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
insta::assert_debug_snapshot!(requirements, @r###"
|
insta::assert_debug_snapshot!(requirements, @r###"
|
||||||
RequirementsTxt {
|
RequirementsTxt {
|
||||||
requirements: [
|
requirements: [
|
||||||
|
|
@ -1359,9 +1483,9 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
#[cfg(not(windows))]
|
#[cfg(not(windows))]
|
||||||
fn nested_editable() -> Result<()> {
|
async fn nested_editable() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
|
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
|
|
@ -1381,7 +1505,9 @@ mod test {
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let requirements =
|
let requirements =
|
||||||
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap();
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
insta::assert_debug_snapshot!(requirements, @r###"
|
insta::assert_debug_snapshot!(requirements, @r###"
|
||||||
RequirementsTxt {
|
RequirementsTxt {
|
||||||
|
|
@ -1419,8 +1545,8 @@ mod test {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn nested_conflicting_index_url() -> Result<()> {
|
async fn nested_conflicting_index_url() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
|
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
|
|
@ -1439,7 +1565,10 @@ mod test {
|
||||||
--index-url https://fake.pypi.org/simple
|
--index-url https://fake.pypi.org/simple
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
@ -1477,8 +1606,8 @@ mod test {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn parser_error_line_and_column() -> Result<()> {
|
async fn parser_error_line_and_column() -> Result<()> {
|
||||||
let temp_dir = assert_fs::TempDir::new()?;
|
let temp_dir = assert_fs::TempDir::new()?;
|
||||||
let requirements_txt = temp_dir.child("requirements.txt");
|
let requirements_txt = temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str(indoc! {"
|
requirements_txt.write_str(indoc! {"
|
||||||
|
|
@ -1487,7 +1616,10 @@ mod test {
|
||||||
tqdm
|
tqdm
|
||||||
"})?;
|
"})?;
|
||||||
|
|
||||||
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
|
let error =
|
||||||
|
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), ®istry_client())
|
||||||
|
.await
|
||||||
|
.unwrap_err();
|
||||||
let errors = anyhow::Error::new(error).chain().join("\n");
|
let errors = anyhow::Error::new(error).chain().join("\n");
|
||||||
|
|
||||||
let requirement_txt =
|
let requirement_txt =
|
||||||
|
|
|
||||||
|
|
@ -177,6 +177,12 @@ impl RegistryClient {
|
||||||
self.timeout
|
self.timeout
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Set the index URLs to use for fetching packages.
|
||||||
|
#[must_use]
|
||||||
|
pub fn with_index_url(self, index_urls: IndexUrls) -> Self {
|
||||||
|
Self { index_urls, ..self }
|
||||||
|
}
|
||||||
|
|
||||||
/// Fetch a package from the `PyPI` simple API.
|
/// Fetch a package from the `PyPI` simple API.
|
||||||
///
|
///
|
||||||
/// "simple" here refers to [PEP 503 – Simple Repository API](https://peps.python.org/pep-0503/)
|
/// "simple" here refers to [PEP 503 – Simple Repository API](https://peps.python.org/pep-0503/)
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
|
use std::io::Read;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
use fs2::FileExt;
|
use fs2::FileExt;
|
||||||
|
|
@ -12,35 +13,31 @@ pub use crate::path::*;
|
||||||
|
|
||||||
mod path;
|
mod path;
|
||||||
|
|
||||||
/// Reads the contents of the file path given into memory.
|
/// Reads the contents of the file path into memory.
|
||||||
///
|
///
|
||||||
/// If the file path is `-`, then contents are read from stdin instead.
|
/// If the file path is `-`, then contents are read from stdin instead.
|
||||||
pub fn read(path: impl AsRef<Path>) -> std::io::Result<Vec<u8>> {
|
pub async fn read(path: impl AsRef<Path>) -> std::io::Result<Vec<u8>> {
|
||||||
use std::io::Read;
|
|
||||||
|
|
||||||
let path = path.as_ref();
|
let path = path.as_ref();
|
||||||
if path == Path::new("-") {
|
if path == Path::new("-") {
|
||||||
let mut buf = Vec::with_capacity(1024);
|
let mut buf = Vec::with_capacity(1024);
|
||||||
std::io::stdin().read_to_end(&mut buf)?;
|
std::io::stdin().read_to_end(&mut buf)?;
|
||||||
Ok(buf)
|
Ok(buf)
|
||||||
} else {
|
} else {
|
||||||
fs::read(path)
|
fs_err::tokio::read(path).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reads the contents of the file path given into memory as a `String`.
|
/// Reads the contents of the file path into memory as a `String`.
|
||||||
///
|
///
|
||||||
/// If the file path is `-`, then contents are read from stdin instead.
|
/// If the file path is `-`, then contents are read from stdin instead.
|
||||||
pub fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> {
|
pub async fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> {
|
||||||
use std::io::Read;
|
|
||||||
|
|
||||||
let path = path.as_ref();
|
let path = path.as_ref();
|
||||||
if path == Path::new("-") {
|
if path == Path::new("-") {
|
||||||
let mut buf = String::with_capacity(1024);
|
let mut buf = String::with_capacity(1024);
|
||||||
std::io::stdin().read_to_string(&mut buf)?;
|
std::io::stdin().read_to_string(&mut buf)?;
|
||||||
Ok(buf)
|
Ok(buf)
|
||||||
} else {
|
} else {
|
||||||
fs::read_to_string(path)
|
fs_err::tokio::read_to_string(path).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ use std::str::FromStr;
|
||||||
use anstream::{eprint, AutoStream, StripStream};
|
use anstream::{eprint, AutoStream, StripStream};
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
|
use futures::future::OptionFuture;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
|
|
@ -82,6 +83,11 @@ pub(crate) async fn pip_compile(
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Initialize the registry client.
|
||||||
|
let client = RegistryClientBuilder::new(cache.clone())
|
||||||
|
.connectivity(connectivity)
|
||||||
|
.build();
|
||||||
|
|
||||||
// Read all requirements from the provided sources.
|
// Read all requirements from the provided sources.
|
||||||
let RequirementsSpecification {
|
let RequirementsSpecification {
|
||||||
project,
|
project,
|
||||||
|
|
@ -94,11 +100,14 @@ pub(crate) async fn pip_compile(
|
||||||
no_index,
|
no_index,
|
||||||
find_links,
|
find_links,
|
||||||
extras: used_extras,
|
extras: used_extras,
|
||||||
} = RequirementsSpecification::from_sources(requirements, constraints, overrides, &extras)?;
|
} = RequirementsSpecification::from_sources(
|
||||||
|
requirements,
|
||||||
// Incorporate any index locations from the provided sources.
|
constraints,
|
||||||
let index_locations =
|
overrides,
|
||||||
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
&extras,
|
||||||
|
&client,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Check that all provided extras are used
|
// Check that all provided extras are used
|
||||||
if let ExtrasSpecification::Some(extras) = extras {
|
if let ExtrasSpecification::Some(extras) = extras {
|
||||||
|
|
@ -117,28 +126,33 @@ pub(crate) async fn pip_compile(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let preferences: Vec<Requirement> = output_file
|
let preferences: Vec<Requirement> = OptionFuture::from(
|
||||||
// As an optimization, skip reading the lockfile is we're upgrading all packages anyway.
|
output_file
|
||||||
.filter(|_| !upgrade.is_all())
|
// As an optimization, skip reading the lockfile is we're upgrading all packages anyway.
|
||||||
.filter(|output_file| output_file.exists())
|
.filter(|_| !upgrade.is_all())
|
||||||
.map(Path::to_path_buf)
|
.filter(|output_file| output_file.exists())
|
||||||
.map(RequirementsSource::from_path)
|
.map(Path::to_path_buf)
|
||||||
.as_ref()
|
.map(RequirementsSource::from_path)
|
||||||
.map(|source| RequirementsSpecification::from_source(source, &extras))
|
.as_ref()
|
||||||
.transpose()?
|
.map(|source| async {
|
||||||
.map(|spec| spec.requirements)
|
RequirementsSpecification::from_source(source, &extras, &client).await
|
||||||
.map(|requirements| match upgrade {
|
}),
|
||||||
// Respect all pinned versions from the existing lockfile.
|
)
|
||||||
Upgrade::None => requirements,
|
.await
|
||||||
// Ignore all pinned versions from the existing lockfile.
|
.transpose()?
|
||||||
Upgrade::All => vec![],
|
.map(|spec| spec.requirements)
|
||||||
// Ignore pinned versions for the specified packages.
|
.map(|requirements| match upgrade {
|
||||||
Upgrade::Packages(packages) => requirements
|
// Respect all pinned versions from the existing lockfile.
|
||||||
.into_iter()
|
Upgrade::None => requirements,
|
||||||
.filter(|requirement| !packages.contains(&requirement.name))
|
// Ignore all pinned versions from the existing lockfile.
|
||||||
.collect(),
|
Upgrade::All => vec![],
|
||||||
})
|
// Ignore pinned versions for the specified packages.
|
||||||
.unwrap_or_default();
|
Upgrade::Packages(packages) => requirements
|
||||||
|
.into_iter()
|
||||||
|
.filter(|requirement| !packages.contains(&requirement.name))
|
||||||
|
.collect(),
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
// Find an interpreter to use for building distributions
|
// Find an interpreter to use for building distributions
|
||||||
let platform = Platform::current()?;
|
let platform = Platform::current()?;
|
||||||
|
|
@ -196,11 +210,13 @@ pub(crate) async fn pip_compile(
|
||||||
|python_version| Cow::Owned(python_version.markers(interpreter.markers())),
|
|python_version| Cow::Owned(python_version.markers(interpreter.markers())),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Instantiate a client.
|
// Incorporate any index locations from the provided sources.
|
||||||
let client = RegistryClientBuilder::new(cache.clone())
|
let index_locations =
|
||||||
.index_urls(index_locations.index_urls())
|
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
||||||
.connectivity(connectivity)
|
|
||||||
.build();
|
// Update the index URLs on the client, to take into account any index URLs added by the
|
||||||
|
// sources (e.g., `--index-url` in a `requirements.txt` file).
|
||||||
|
let client = client.with_index_url(index_locations.index_urls());
|
||||||
|
|
||||||
// Resolve the flat indexes from `--find-links`.
|
// Resolve the flat indexes from `--find-links`.
|
||||||
let flat_index = {
|
let flat_index = {
|
||||||
|
|
|
||||||
|
|
@ -70,6 +70,11 @@ pub(crate) async fn pip_install(
|
||||||
) -> Result<ExitStatus> {
|
) -> Result<ExitStatus> {
|
||||||
let start = std::time::Instant::now();
|
let start = std::time::Instant::now();
|
||||||
|
|
||||||
|
// Initialize the registry client.
|
||||||
|
let client = RegistryClientBuilder::new(cache.clone())
|
||||||
|
.connectivity(connectivity)
|
||||||
|
.build();
|
||||||
|
|
||||||
// Read all requirements from the provided sources.
|
// Read all requirements from the provided sources.
|
||||||
let RequirementsSpecification {
|
let RequirementsSpecification {
|
||||||
project,
|
project,
|
||||||
|
|
@ -82,11 +87,7 @@ pub(crate) async fn pip_install(
|
||||||
no_index,
|
no_index,
|
||||||
find_links,
|
find_links,
|
||||||
extras: used_extras,
|
extras: used_extras,
|
||||||
} = specification(requirements, constraints, overrides, extras)?;
|
} = specification(requirements, constraints, overrides, extras, &client).await?;
|
||||||
|
|
||||||
// Incorporate any index locations from the provided sources.
|
|
||||||
let index_locations =
|
|
||||||
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
|
||||||
|
|
||||||
// Check that all provided extras are used
|
// Check that all provided extras are used
|
||||||
if let ExtrasSpecification::Some(extras) = extras {
|
if let ExtrasSpecification::Some(extras) = extras {
|
||||||
|
|
@ -169,11 +170,13 @@ pub(crate) async fn pip_install(
|
||||||
let tags = venv.interpreter().tags()?;
|
let tags = venv.interpreter().tags()?;
|
||||||
let markers = venv.interpreter().markers();
|
let markers = venv.interpreter().markers();
|
||||||
|
|
||||||
// Instantiate a client.
|
// Incorporate any index locations from the provided sources.
|
||||||
let client = RegistryClientBuilder::new(cache.clone())
|
let index_locations =
|
||||||
.index_urls(index_locations.index_urls())
|
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
||||||
.connectivity(connectivity)
|
|
||||||
.build();
|
// Update the index URLs on the client, to take into account any index URLs added by the
|
||||||
|
// sources (e.g., `--index-url` in a `requirements.txt` file).
|
||||||
|
let client = client.with_index_url(index_locations.index_urls());
|
||||||
|
|
||||||
// Resolve the flat indexes from `--find-links`.
|
// Resolve the flat indexes from `--find-links`.
|
||||||
let flat_index = {
|
let flat_index = {
|
||||||
|
|
@ -316,11 +319,12 @@ pub(crate) async fn pip_install(
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Consolidate the requirements for an installation.
|
/// Consolidate the requirements for an installation.
|
||||||
fn specification(
|
async fn specification(
|
||||||
requirements: &[RequirementsSource],
|
requirements: &[RequirementsSource],
|
||||||
constraints: &[RequirementsSource],
|
constraints: &[RequirementsSource],
|
||||||
overrides: &[RequirementsSource],
|
overrides: &[RequirementsSource],
|
||||||
extras: &ExtrasSpecification<'_>,
|
extras: &ExtrasSpecification<'_>,
|
||||||
|
client: &RegistryClient,
|
||||||
) -> Result<RequirementsSpecification, Error> {
|
) -> Result<RequirementsSpecification, Error> {
|
||||||
// If the user requests `extras` but does not provide a pyproject toml source
|
// If the user requests `extras` but does not provide a pyproject toml source
|
||||||
if !matches!(extras, ExtrasSpecification::None)
|
if !matches!(extras, ExtrasSpecification::None)
|
||||||
|
|
@ -332,8 +336,14 @@ fn specification(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read all requirements from the provided sources.
|
// Read all requirements from the provided sources.
|
||||||
let spec =
|
let spec = RequirementsSpecification::from_sources(
|
||||||
RequirementsSpecification::from_sources(requirements, constraints, overrides, extras)?;
|
requirements,
|
||||||
|
constraints,
|
||||||
|
overrides,
|
||||||
|
extras,
|
||||||
|
client,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Check that all provided extras are used
|
// Check that all provided extras are used
|
||||||
if let ExtrasSpecification::Some(extras) = extras {
|
if let ExtrasSpecification::Some(extras) = extras {
|
||||||
|
|
|
||||||
|
|
@ -48,6 +48,11 @@ pub(crate) async fn pip_sync(
|
||||||
) -> Result<ExitStatus> {
|
) -> Result<ExitStatus> {
|
||||||
let start = std::time::Instant::now();
|
let start = std::time::Instant::now();
|
||||||
|
|
||||||
|
// Initialize the registry client.
|
||||||
|
let client = RegistryClientBuilder::new(cache.clone())
|
||||||
|
.connectivity(connectivity)
|
||||||
|
.build();
|
||||||
|
|
||||||
// Read all requirements from the provided sources.
|
// Read all requirements from the provided sources.
|
||||||
let RequirementsSpecification {
|
let RequirementsSpecification {
|
||||||
project: _project,
|
project: _project,
|
||||||
|
|
@ -60,7 +65,7 @@ pub(crate) async fn pip_sync(
|
||||||
no_index,
|
no_index,
|
||||||
find_links,
|
find_links,
|
||||||
extras: _extras,
|
extras: _extras,
|
||||||
} = RequirementsSpecification::from_simple_sources(sources)?;
|
} = RequirementsSpecification::from_simple_sources(sources, &client).await?;
|
||||||
|
|
||||||
let num_requirements = requirements.len() + editables.len();
|
let num_requirements = requirements.len() + editables.len();
|
||||||
if num_requirements == 0 {
|
if num_requirements == 0 {
|
||||||
|
|
@ -68,10 +73,6 @@ pub(crate) async fn pip_sync(
|
||||||
return Ok(ExitStatus::Success);
|
return Ok(ExitStatus::Success);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Incorporate any index locations from the provided sources.
|
|
||||||
let index_locations =
|
|
||||||
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
|
||||||
|
|
||||||
// Detect the current Python interpreter.
|
// Detect the current Python interpreter.
|
||||||
let platform = Platform::current()?;
|
let platform = Platform::current()?;
|
||||||
let venv = if let Some(python) = python.as_ref() {
|
let venv = if let Some(python) = python.as_ref() {
|
||||||
|
|
@ -108,11 +109,13 @@ pub(crate) async fn pip_sync(
|
||||||
// Determine the current environment markers.
|
// Determine the current environment markers.
|
||||||
let tags = venv.interpreter().tags()?;
|
let tags = venv.interpreter().tags()?;
|
||||||
|
|
||||||
// Prep the registry client.
|
// Incorporate any index locations from the provided sources.
|
||||||
let client = RegistryClientBuilder::new(cache.clone())
|
let index_locations =
|
||||||
.index_urls(index_locations.index_urls())
|
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
|
||||||
.connectivity(connectivity)
|
|
||||||
.build();
|
// Update the index URLs on the client, to take into account any index URLs added by the
|
||||||
|
// sources (e.g., `--index-url` in a `requirements.txt` file).
|
||||||
|
let client = client.with_index_url(index_locations.index_urls());
|
||||||
|
|
||||||
// Resolve the flat indexes from `--find-links`.
|
// Resolve the flat indexes from `--find-links`.
|
||||||
let flat_index = {
|
let flat_index = {
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ use tracing::debug;
|
||||||
use distribution_types::{InstalledMetadata, Name};
|
use distribution_types::{InstalledMetadata, Name};
|
||||||
use platform_host::Platform;
|
use platform_host::Platform;
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
|
use uv_client::{Connectivity, RegistryClientBuilder};
|
||||||
use uv_fs::Simplified;
|
use uv_fs::Simplified;
|
||||||
use uv_interpreter::PythonEnvironment;
|
use uv_interpreter::PythonEnvironment;
|
||||||
|
|
||||||
|
|
@ -20,10 +21,16 @@ pub(crate) async fn pip_uninstall(
|
||||||
python: Option<String>,
|
python: Option<String>,
|
||||||
system: bool,
|
system: bool,
|
||||||
cache: Cache,
|
cache: Cache,
|
||||||
|
connectivity: Connectivity,
|
||||||
printer: Printer,
|
printer: Printer,
|
||||||
) -> Result<ExitStatus> {
|
) -> Result<ExitStatus> {
|
||||||
let start = std::time::Instant::now();
|
let start = std::time::Instant::now();
|
||||||
|
|
||||||
|
// Initialize the registry client.
|
||||||
|
let client: uv_client::RegistryClient = RegistryClientBuilder::new(cache.clone())
|
||||||
|
.connectivity(connectivity)
|
||||||
|
.build();
|
||||||
|
|
||||||
// Read all requirements from the provided sources.
|
// Read all requirements from the provided sources.
|
||||||
let RequirementsSpecification {
|
let RequirementsSpecification {
|
||||||
project: _project,
|
project: _project,
|
||||||
|
|
@ -36,7 +43,7 @@ pub(crate) async fn pip_uninstall(
|
||||||
no_index: _no_index,
|
no_index: _no_index,
|
||||||
find_links: _find_links,
|
find_links: _find_links,
|
||||||
extras: _extras,
|
extras: _extras,
|
||||||
} = RequirementsSpecification::from_simple_sources(sources)?;
|
} = RequirementsSpecification::from_simple_sources(sources, &client).await?;
|
||||||
|
|
||||||
// Detect the current Python interpreter.
|
// Detect the current Python interpreter.
|
||||||
let platform = Platform::current()?;
|
let platform = Platform::current()?;
|
||||||
|
|
|
||||||
|
|
@ -861,6 +861,10 @@ struct PipUninstallArgs {
|
||||||
/// should be used with caution, as it can modify the system Python installation.
|
/// should be used with caution, as it can modify the system Python installation.
|
||||||
#[clap(long, conflicts_with = "python")]
|
#[clap(long, conflicts_with = "python")]
|
||||||
system: bool,
|
system: bool,
|
||||||
|
|
||||||
|
/// Run offline, i.e., without accessing the network.
|
||||||
|
#[arg(global = true, long)]
|
||||||
|
offline: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Args)]
|
#[derive(Args)]
|
||||||
|
|
@ -1455,7 +1459,19 @@ async fn run() -> Result<ExitStatus> {
|
||||||
.map(RequirementsSource::from_path),
|
.map(RequirementsSource::from_path),
|
||||||
)
|
)
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
commands::pip_uninstall(&sources, args.python, args.system, cache, printer).await
|
commands::pip_uninstall(
|
||||||
|
&sources,
|
||||||
|
args.python,
|
||||||
|
args.system,
|
||||||
|
cache,
|
||||||
|
if args.offline {
|
||||||
|
Connectivity::Offline
|
||||||
|
} else {
|
||||||
|
Connectivity::Online
|
||||||
|
},
|
||||||
|
printer,
|
||||||
|
)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
Commands::Pip(PipNamespace {
|
Commands::Pip(PipNamespace {
|
||||||
command: PipCommand::Freeze(args),
|
command: PipCommand::Freeze(args),
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,7 @@ use distribution_types::{FlatIndexLocation, IndexUrl};
|
||||||
use pep508_rs::Requirement;
|
use pep508_rs::Requirement;
|
||||||
use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
|
use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
|
||||||
use tracing::{instrument, Level};
|
use tracing::{instrument, Level};
|
||||||
|
use uv_client::RegistryClient;
|
||||||
use uv_fs::Simplified;
|
use uv_fs::Simplified;
|
||||||
use uv_normalize::{ExtraName, PackageName};
|
use uv_normalize::{ExtraName, PackageName};
|
||||||
|
|
||||||
|
|
@ -138,9 +139,10 @@ pub(crate) struct RequirementsSpecification {
|
||||||
impl RequirementsSpecification {
|
impl RequirementsSpecification {
|
||||||
/// Read the requirements and constraints from a source.
|
/// Read the requirements and constraints from a source.
|
||||||
#[instrument(skip_all, level = Level::DEBUG, fields(source = % source))]
|
#[instrument(skip_all, level = Level::DEBUG, fields(source = % source))]
|
||||||
pub(crate) fn from_source(
|
pub(crate) async fn from_source(
|
||||||
source: &RequirementsSource,
|
source: &RequirementsSource,
|
||||||
extras: &ExtrasSpecification,
|
extras: &ExtrasSpecification<'_>,
|
||||||
|
client: &RegistryClient,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
Ok(match source {
|
Ok(match source {
|
||||||
RequirementsSource::Package(name) => {
|
RequirementsSource::Package(name) => {
|
||||||
|
|
@ -176,7 +178,8 @@ impl RequirementsSpecification {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RequirementsSource::RequirementsTxt(path) => {
|
RequirementsSource::RequirementsTxt(path) => {
|
||||||
let requirements_txt = RequirementsTxt::parse(path, std::env::current_dir()?)?;
|
let requirements_txt =
|
||||||
|
RequirementsTxt::parse(path, std::env::current_dir()?, client).await?;
|
||||||
Self {
|
Self {
|
||||||
project: None,
|
project: None,
|
||||||
requirements: requirements_txt
|
requirements: requirements_txt
|
||||||
|
|
@ -206,7 +209,7 @@ impl RequirementsSpecification {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RequirementsSource::PyprojectToml(path) => {
|
RequirementsSource::PyprojectToml(path) => {
|
||||||
let contents = uv_fs::read_to_string(path)?;
|
let contents = uv_fs::read_to_string(path).await?;
|
||||||
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
|
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
|
||||||
.with_context(|| format!("Failed to parse `{}`", path.simplified_display()))?;
|
.with_context(|| format!("Failed to parse `{}`", path.simplified_display()))?;
|
||||||
let mut used_extras = FxHashSet::default();
|
let mut used_extras = FxHashSet::default();
|
||||||
|
|
@ -273,11 +276,12 @@ impl RequirementsSpecification {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the combined requirements and constraints from a set of sources.
|
/// Read the combined requirements and constraints from a set of sources.
|
||||||
pub(crate) fn from_sources(
|
pub(crate) async fn from_sources(
|
||||||
requirements: &[RequirementsSource],
|
requirements: &[RequirementsSource],
|
||||||
constraints: &[RequirementsSource],
|
constraints: &[RequirementsSource],
|
||||||
overrides: &[RequirementsSource],
|
overrides: &[RequirementsSource],
|
||||||
extras: &ExtrasSpecification,
|
extras: &ExtrasSpecification<'_>,
|
||||||
|
client: &RegistryClient,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let mut spec = Self::default();
|
let mut spec = Self::default();
|
||||||
|
|
||||||
|
|
@ -285,7 +289,7 @@ impl RequirementsSpecification {
|
||||||
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
|
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
|
||||||
// a requirements file can also add constraints.
|
// a requirements file can also add constraints.
|
||||||
for source in requirements {
|
for source in requirements {
|
||||||
let source = Self::from_source(source, extras)?;
|
let source = Self::from_source(source, extras, client).await?;
|
||||||
spec.requirements.extend(source.requirements);
|
spec.requirements.extend(source.requirements);
|
||||||
spec.constraints.extend(source.constraints);
|
spec.constraints.extend(source.constraints);
|
||||||
spec.overrides.extend(source.overrides);
|
spec.overrides.extend(source.overrides);
|
||||||
|
|
@ -312,7 +316,7 @@ impl RequirementsSpecification {
|
||||||
|
|
||||||
// Read all constraints, treating _everything_ as a constraint.
|
// Read all constraints, treating _everything_ as a constraint.
|
||||||
for source in constraints {
|
for source in constraints {
|
||||||
let source = Self::from_source(source, extras)?;
|
let source = Self::from_source(source, extras, client).await?;
|
||||||
spec.constraints.extend(source.requirements);
|
spec.constraints.extend(source.requirements);
|
||||||
spec.constraints.extend(source.constraints);
|
spec.constraints.extend(source.constraints);
|
||||||
spec.constraints.extend(source.overrides);
|
spec.constraints.extend(source.overrides);
|
||||||
|
|
@ -332,7 +336,7 @@ impl RequirementsSpecification {
|
||||||
|
|
||||||
// Read all overrides, treating both requirements _and_ constraints as overrides.
|
// Read all overrides, treating both requirements _and_ constraints as overrides.
|
||||||
for source in overrides {
|
for source in overrides {
|
||||||
let source = Self::from_source(source, extras)?;
|
let source = Self::from_source(source, extras, client).await?;
|
||||||
spec.overrides.extend(source.requirements);
|
spec.overrides.extend(source.requirements);
|
||||||
spec.overrides.extend(source.constraints);
|
spec.overrides.extend(source.constraints);
|
||||||
spec.overrides.extend(source.overrides);
|
spec.overrides.extend(source.overrides);
|
||||||
|
|
@ -354,8 +358,11 @@ impl RequirementsSpecification {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the requirements from a set of sources.
|
/// Read the requirements from a set of sources.
|
||||||
pub(crate) fn from_simple_sources(requirements: &[RequirementsSource]) -> Result<Self> {
|
pub(crate) async fn from_simple_sources(
|
||||||
Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None)
|
requirements: &[RequirementsSource],
|
||||||
|
client: &RegistryClient,
|
||||||
|
) -> Result<Self> {
|
||||||
|
Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None, client).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -115,7 +115,7 @@ fn missing_requirements_in() {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.in`
|
error: failed to read from file `requirements.in`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###
|
"###
|
||||||
);
|
);
|
||||||
|
|
@ -142,7 +142,7 @@ fn missing_venv() -> Result<()> {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.in`
|
error: failed to read from file `requirements.in`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###
|
"###
|
||||||
);
|
);
|
||||||
|
|
|
||||||
|
|
@ -100,7 +100,7 @@ fn missing_requirements_txt() {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.txt`
|
error: failed to read from file `requirements.txt`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###
|
"###
|
||||||
);
|
);
|
||||||
|
|
@ -1509,6 +1509,73 @@ fn install_constraints_inline() -> Result<()> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Install a package from a `constraints.txt` file on a remote http server.
|
||||||
|
#[test]
|
||||||
|
fn install_constraints_remote() {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
uv_snapshot!(command(&context)
|
||||||
|
.arg("-c")
|
||||||
|
.arg("https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt")
|
||||||
|
.arg("typing_extensions>=4.0"), @r###"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 1 package in [TIME]
|
||||||
|
Downloaded 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ typing-extensions==4.7.1
|
||||||
|
"###
|
||||||
|
); // would yield typing-extensions==4.8.2 without constraint file
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Install a package from a `requirements.txt` file, with an inline constraint, which points
|
||||||
|
/// to a remote http server.
|
||||||
|
#[test]
|
||||||
|
fn install_constraints_inline_remote() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
let requirementstxt = context.temp_dir.child("requirements.txt");
|
||||||
|
requirementstxt.write_str("typing-extensions>=4.0\n-c https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt")?;
|
||||||
|
|
||||||
|
uv_snapshot!(command(&context)
|
||||||
|
.arg("-r")
|
||||||
|
.arg("requirements.txt"), @r###"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 1 package in [TIME]
|
||||||
|
Downloaded 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ typing-extensions==4.7.1
|
||||||
|
"### // would yield typing-extensions==4.8.2 without constraint file
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn install_constraints_respects_offline_mode() {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
uv_snapshot!(command(&context)
|
||||||
|
.arg("--offline")
|
||||||
|
.arg("-r")
|
||||||
|
.arg("http://example.com/requirements.txt"), @r###"
|
||||||
|
success: false
|
||||||
|
exit_code: 2
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
error: Error while accessing remote requirements file http://example.com/requirements.txt: Middleware error: Network connectivity is disabled, but the requested data wasn't found in the cache for: `http://example.com/requirements.txt`
|
||||||
|
Caused by: Network connectivity is disabled, but the requested data wasn't found in the cache for: `http://example.com/requirements.txt`
|
||||||
|
"###
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
/// Tests that we can install `polars==0.14.0`, which has this odd dependency
|
/// Tests that we can install `polars==0.14.0`, which has this odd dependency
|
||||||
/// requirement in its wheel metadata: `pyarrow>=4.0.*; extra == 'pyarrow'`.
|
/// requirement in its wheel metadata: `pyarrow>=4.0.*; extra == 'pyarrow'`.
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -124,7 +124,7 @@ fn missing_requirements_txt() {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.txt`
|
error: failed to read from file `requirements.txt`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
|
|
@ -151,7 +151,7 @@ fn missing_venv() -> Result<()> {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.txt`
|
error: failed to read from file `requirements.txt`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -75,7 +75,7 @@ fn missing_requirements_txt() -> Result<()> {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `requirements.txt`
|
error: failed to read from file `requirements.txt`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###
|
"###
|
||||||
);
|
);
|
||||||
|
|
@ -125,7 +125,7 @@ fn missing_pyproject_toml() -> Result<()> {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
error: failed to open file `pyproject.toml`
|
error: failed to read from file `pyproject.toml`
|
||||||
Caused by: No such file or directory (os error 2)
|
Caused by: No such file or directory (os error 2)
|
||||||
"###
|
"###
|
||||||
);
|
);
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue