uv/crates/uv/tests/pip_compile.rs

#![cfg(all(feature = "python", feature = "pypi"))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::disallowed_types))]
use std::env::current_dir;
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use anyhow::{bail, Context, Result};
use assert_fs::prelude::*;
use assert_fs::TempDir;
use indoc::indoc;
use url::Url;
use common::{uv_snapshot, TestContext, INSTA_FILTERS};
use uv_fs::Simplified;
use crate::common::{get_bin, EXCLUDE_NEWER};
mod common;
/// Resolve a specific version of `anyio` from a `requirements.in` file.
#[test]
fn compile_requirements_in() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
anyio==3.7.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file with a `--annotation-style=line` flag.
#[test]
fn compile_requirements_in_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.arg("--annotation-style=line")
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z --annotation-style=line requirements.in
anyio==3.7.0
idna==3.4 # via anyio
sniffio==1.3.0 # via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file on stdin
/// when passed a path of `-`.
#[test]
fn compile_requirements_in_stdin() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.stdin(fs::File::open(requirements_in)?)
.arg("-"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z -
anyio==3.7.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
#[test]
fn missing_requirements_in() {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);
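// The failed invocation should not have created the missing file.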
requirements_in.assert(predicates::path::missing());
}
#[test]
fn missing_venv() -> Result<()> {
let temp_dir = TempDir::new()?;
let cache_dir = TempDir::new()?;
let venv = temp_dir.child(".venv");
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);
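// The failed invocation should not have created the virtual environment.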
venv.assert(predicates::path::missing());
Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml
anyio==3.7.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file.
#[test]
fn compile_constraints_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("idna<3.4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
anyio==3.7.0
idna==3.3
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with an inline constraint.
#[test]
fn compile_constraints_inline() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
requirements_in.write_str("-c constraints.txt")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("idna<3.4")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
----- stderr -----
Resolved 0 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that
/// uses markers.
#[test]
fn compile_constraints_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// Constrain a transitive dependency based on the Python version
let constraints_txt = context.temp_dir.child("constraints.txt");
// If constraints are ignored, these will conflict
constraints_txt.write_str("sniffio==1.2.0;python_version<='3.7'")?;
constraints_txt.write_str("sniffio==1.3.0;python_version>'3.7'")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
anyio==4.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an optional dependency group in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml --extra foo
anyio==3.7.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an extra with non-normalized names in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_name_normalization() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
"anyio==3.7.0",
]
"#,
)?;
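// Both the declared extra `FrIeNdLy-._.-bArD` and the requested extra `FRiENDlY-...-_-BARd`
// normalize to `friendly-bard`, so the lookup should succeed.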
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("FRiENDlY-...-_-BARd"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml --extra FRiENDlY-...-_-BARd
anyio==3.7.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request an extra that does not exist as a dependency group in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("bar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extra not found: bar
"###
);
Ok(())
}
/// Show a dedicated warning if the user tries to compile a `pyproject.toml` file with a `poetry`
/// section.
#[test]
fn compile_empty_pyproject_toml_poetry() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
numpy = "^1"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml
----- stderr -----
warning: `pyproject.toml` does not contain any dependencies (hint: specify dependencies in the `project.dependencies` section; `tool.poetry.dependencies` is not currently supported)
Resolved 0 packages in [TIME]
"###
);
Ok(())
}
/// Request multiple extras that do not exist as dependency groups in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("foo")
.arg("--extra")
.arg("bar")
.arg("--extra")
.arg("foobar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extras not found: bar, foobar
"###
);
Ok(())
}
/// Request extras when using a `requirements.in` file which does not support extras.
#[test]
fn compile_requirements_file_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--all-extras"),
@r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requesting extras requires a pyproject.toml input file.
"###
);
Ok(())
}
/// Request an extra with a name that does not conform to the specification.
#[test]
fn invalid_extra_name() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("invalid name!"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'invalid name!' for '--extra <EXTRA>': Extra names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12.
#[test]
fn compile_python_312() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --python-version 3.12
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12 with `--annotation-style=line`.
#[test]
fn compile_python_312_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("--annotation-style=line")
.arg("requirements.in")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z --annotation-style=line requirements.in --python-version 3.12
black==23.10.1
click==8.1.7 # via black
mypy-extensions==1.0.0 # via black
packaging==23.2 # via black
pathspec==0.11.2 # via black
platformdirs==4.0.0 # via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12 without deps.
#[test]
fn compile_python_312_no_deps() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-deps --python-version 3.12
black==23.10.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.7.
#[test]
fn compile_python_37() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let filters: Vec<_> = [
// 3.7 may not be installed
(
"warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
"",
),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (3.7) does not satisfy Python>=3.8
and black==23.10.1 depends on Python>=3.8, we can conclude that
black==23.10.1 cannot be used.
And because you require black==23.10.1, we can conclude that the
requirements are unsatisfiable.
"###);
Ok(())
}
/// Resolve a source distribution with `--resolution=lowest-direct`, to ensure that the build
/// requirements aren't resolved at their lowest compatible version.
#[test]
fn compile_sdist_resolution_lowest() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=lowest-direct")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --resolution=lowest-direct --python-version 3.12
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black against an invalid Python version.
#[test]
fn compile_python_invalid_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7.x"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7.x' for '--python-version <PYTHON_VERSION>': after parsing 3.7, found ".x" after it, which is not part of a valid version
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a specific version of Black against a Python development version.
#[test]
fn compile_python_dev_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7-dev"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7-dev' for '--python-version <PYTHON_VERSION>': Python version 3.7-dev is a development release
For more information, try '--help'.
"###
);
Ok(())
}
/// Test that we select the last 3.8 compatible numpy version instead of trying to compile an
/// incompatible sdist <https://github.com/astral-sh/uv/issues/388>
#[test]
fn compile_numpy_py38() -> Result<()> {
let context = TestContext::new("3.8");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("numpy")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-build"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-build
numpy==1.24.4
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask wheel via a URL dependency.
#[test]
fn compile_wheel_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask source distribution via a URL dependency.
///
/// Exercises the `prepare_metadata_for_build_wheel` hooks.
#[test]
fn compile_sdist_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask source distribution via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git")?;
// In addition to the standard filters, remove the `main` commit, which will change frequently.
let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@[COMMIT]
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Resolve a specific Flask branch via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_branch_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@1.0.x")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask tag via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_tag_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@735a4701d6d5e848241e7d7535db898efb62d400
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_long_commit_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_short_commit_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@d92b64a")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask ref via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_refs_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("flask @ git+https://github.com/pallets/flask.git@refs/pull/5313/head")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@7af0271f4703a71beef8e26d1f5f6f8da04100e6
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Git dependency with a subdirectory.
#[test]
#[cfg(feature = "git")]
fn compile_git_subdirectory_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_concurrent_access() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a Git dependency with a declared name that differs from the true name of the package.
#[test]
#[cfg(feature = "git")]
fn compile_git_mismatched_name() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to download and build: dask @ git+https://github.com/pallets/flask.git@3.0.0
Caused by: Package metadata name `flask` does not match given name `dask`
"###
);
Ok(())
}
/// Request Flask, but include a URL dependency for Werkzeug, which should avoid adding a
/// duplicate dependency from `PyPI`.
#[test]
fn mixed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at a _different_ version, which
/// should result in a conflict.
#[test]
fn conflicting_direct_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of werkzeug==3.0.0 and you require
werkzeug==3.0.0, we can conclude that the requirements are
unsatisfiable.
"###
);
Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at _the same_ version, which
/// should prefer the direct URL dependency.
#[test]
fn compatible_direct_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at different versions, which should result in a conflict.
#[test]
fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
- https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at different versions. However, only one of the
/// URLs is compatible with the requested Python version, so there shouldn't be any conflict.
#[test]
fn conflicting_repeated_url_dependency_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl ; python_version >= '3.10'
werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl ; python_version < '3.10'
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at the same version. Despite mapping to the same
/// version, it should still result in a conflict.
#[test]
#[cfg(feature = "git")]
fn conflicting_repeated_url_dependency_version_match() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- git+https://github.com/pallets/werkzeug.git@2.0.0
- https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
"###
);
Ok(())
}
/// Request Flask, but include a URL dependency for a conflicting version of Werkzeug.
#[test]
fn conflicting_transitive_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because flask==3.0.0 depends on werkzeug>=3.0.0 and only werkzeug<3.0.0
is available, we can conclude that flask==3.0.0 cannot be used.
And because you require flask==3.0.0, we can conclude that the
requirements are unsatisfiable.
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs which resolve to the same canonical version.
#[test]
fn compatible_repeated_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ git+https://github.com/pallets/werkzeug@2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
werkzeug @ git+https://github.com/pallets/werkzeug@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs which resolve to the same repository, but different
/// commits.
#[test]
fn conflicting_repeated_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ git+https://github.com/pallets/werkzeug@3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- git+https://github.com/pallets/werkzeug.git@2.0.0
- git+https://github.com/pallets/werkzeug@3.0.0
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs: `main`, and a precise SHA. Allow the precise SHA
/// to override the `main` branch.
#[test]
fn compatible_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@main\nwerkzeug @ git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
markupsafe==2.1.3
# via werkzeug
werkzeug @ git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs: `main`, and a precise SHA, followed by `main` again.
/// We _may_ want to allow this, but we don't right now.
#[test]
fn compatible_repeated_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug@main\nwerkzeug @ git+https://github.com/pallets/werkzeug.git@main\nwerkzeug @ git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f\nwerkzeug @ git+https://github.com/pallets/werkzeug.git@main")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f
- git+https://github.com/pallets/werkzeug.git@main
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs: `main`, and a precise SHA. Allow the precise SHA
/// to override the `main` branch, but error when we see yet another URL for the same package.
#[test]
fn incompatible_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@main\nwerkzeug @ git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f\nwerkzeug @ git+https://github.com/pallets/werkzeug.git@v1.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- git+https://github.com/pallets/werkzeug@32e69512134c2f8183c6438b2b2e13fd24e9d19f
- git+https://github.com/pallets/werkzeug.git@v1.0.0
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since this URL isn't declared upfront, we should reject it.
#[test]
#[cfg(feature = "git")]
fn disallowed_transitive_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ ${HATCHLING}")?;
let hatchling_path = current_dir()?.join("../../scripts/editable-installs/hatchling_editable");
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("HATCHLING", hatchling_path.as_os_str()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Package `iniconfig` attempted to resolve via URL: git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4. URL dependencies must be expressed as direct requirements or constraints. Consider adding `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4` to your dependencies or constraints file.
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since this URL is declared as a constraint, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ ${HATCHLING}")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;
let hatchling_path = current_dir()?.join("../../scripts/editable-installs/hatchling_editable");
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.env("HATCHLING", hatchling_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
hatchling-editable @ ${HATCHLING}
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4` is declared as a constraint, and
/// the two map to the same canonical URL, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_canonical_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ ${HATCHLING}")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;
let hatchling_path = current_dir()?.join("../../scripts/editable-installs/hatchling_editable");
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.env("HATCHLING", hatchling_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
hatchling-editable @ ${HATCHLING}
iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve packages from all optional dependency groups in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_all_extras() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml --all-extras
anyio==3.7.0
# via httpcore
certifi==2023.11.17
# via httpcore
h11==0.14.0
# via httpcore
httpcore==0.18.0
idna==3.4
# via anyio
iniconfig==1.1.1
sniffio==1.3.0
# via
# anyio
# httpcore
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
#[test]
fn compile_pyproject_toml_all_extras_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("--annotation-style=line")
.arg("pyproject.toml")
.arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z --annotation-style=line pyproject.toml --all-extras
anyio==3.7.0 # via httpcore
certifi==2023.11.17 # via httpcore
h11==0.14.0 # via httpcore
httpcore==0.18.0
idna==3.4 # via anyio
iniconfig==1.1.1
sniffio==1.3.0 # via anyio, httpcore
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--extra` and `--all-extras` cannot be used together.
#[test]
fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--all-extras")
.arg("--extra")
.arg("foo"),
@r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the argument '--all-extras' cannot be used with '--extra <EXTRA>'
Usage: uv pip compile --cache-dir [CACHE_DIR] --exclude-newer <EXCLUDE_NEWER> --all-extras <SRC_FILE>...
For more information, try '--help'.
"###
);
Ok(())
}
/// Compile requirements that cannot be solved due to a conflict in a `pyproject.toml` file.
#[test]
fn compile_unsolvable_requirements() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "my-project"
dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because my-project depends on anyio==3.7.0 and my-project depends on
anyio==4.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Compile requirements in a `pyproject.toml` file that cannot be resolved due to
/// a requirement with a version that is not available online.
#[test]
fn compile_unsolvable_requirements_version_not_available() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "my-project"
dependencies = ["anyio==300.1.4"]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of anyio==300.1.4 and my-project depends on
anyio==300.1.4, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve at a specific time in the past
#[test]
fn compile_exclude_newer() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
// 4.64.0: 2022-04-04T01:48:46.194635Z
// 4.64.1: 2022-09-03T11:10:27.148080Z
.arg("2022-04-04T12:00:00Z")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --exclude-newer 2022-04-04T12:00:00Z --cache-dir [CACHE_DIR]
tqdm==4.64.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Use a date as input instead.
// We interpret a date as including this day
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
.arg("2022-04-04")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --exclude-newer 2022-04-04 --cache-dir [CACHE_DIR]
tqdm==4.64.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Check the error message for invalid datetime
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
.arg("2022-04-04+02:00")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.current_dir(context.temp_dir.path()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '2022-04-04+02:00' for '--exclude-newer <EXCLUDE_NEWER>': Neither a valid date (trailing input) not a valid datetime (input contains invalid characters)
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a local path dependency on a specific wheel.
#[test]
fn compile_wheel_path_dependency() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = fs::File::create(&flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ {}",
Url::from_file_path(flask_wheel.path()).unwrap()
))?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
// Run the same operation, but this time with a relative path, omitting the `//`.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file:flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file:flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with a relative path, including the `//`.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file://flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with a relative path, exclusive of any scheme.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ./flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ./flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with an absolute path (rather than a URL).
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("flask @ {}", flask_wheel.path().display()))?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filter_path = regex::escape(&flask_wheel.user_display().to_string());
let filters: Vec<_> = [(filter_path.as_str(), "/[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ /[TEMP_DIR]/
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a local path dependency on a specific source distribution.
#[test]
fn compile_source_distribution_path_dependency() -> Result<()> {
let context = TestContext::new("3.12");
// Download a source distribution.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0.tar.gz");
let mut flask_wheel_file = std::fs::File::create(&flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ {}",
Url::from_file_path(flask_wheel.path()).unwrap()
))?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://[TEMP_DIR]/flask-3.0.0.tar.gz
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Resolve a local path dependency to a non-existent file.
#[test]
fn compile_wheel_path_dependency_missing() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file:///path/to/flask-3.0.0-py3-none-any.whl")?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Distribution not found at: file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
"###);
Ok(())
}
/// Resolve a yanked version of `attrs` by specifying the version directly.
#[test]
fn compile_yanked_version_direct() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("attrs==21.1.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
attrs==21.1.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Fail to resolve `attrs` due to the indirect use of a yanked version (`21.1.0`).
#[test]
fn compile_yanked_version_indirect() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("attrs>20.3.0,<21.2.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because only the following versions of attrs are available:
attrs<=20.3.0
attrs==21.1.0
attrs>=21.2.0
and attrs==21.1.0 is unusable because it was yanked (reason:
Installable but not importable on Python 3.4), we can conclude that
attrs>20.3.0,<21.2.0 cannot be used.
And because you require attrs>20.3.0,<21.2.0, we can conclude that the
requirements are unsatisfiable.
"###
);
Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an incompatible version.
#[test]
fn override_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("werkzeug==2.3.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --override overrides.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==2.3.0
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Black==23.10.1 depends on tomli>=1.1.0 for Python versions below 3.11. Demonstrate that we can
/// override it with a multi-line override.
#[test]
fn override_multi_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str(
"tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --override overrides.txt
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
tomli==2.0.1
# via black
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Request an extra that doesn't exist on the specified package (a registry requirement).
#[test]
fn missing_registry_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black[tensorboard]==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
warning: The package `black==23.10.1` does not have an extra named `tensorboard`.
"###
);
Ok(())
}
/// Request an extra that doesn't exist on the specified package (a direct URL requirement).
#[test]
fn missing_url_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
warning: The package `flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl` does not have an extra named `tensorboard`.
"###
);
Ok(())
}
/// Resolve a dependency from a URL, preserving the exact casing of the URL as specified in the
/// requirements file.
#[test]
fn preserve_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a dependency from a URL, preserving the unexpanded environment variable as specified in
/// the requirements file.
#[test]
fn preserve_project_root() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a dependency from a URL, passing in the entire URL as an environment variable.
#[test]
fn respect_http_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${URL}
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// A requirement defined as a single unnamed environment variable should be parsed as such.
#[test]
fn respect_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${URL}
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// A requirement defined as a single unnamed environment variable should error if the environment
/// variable is not set.
#[test]
fn error_missing_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Couldn't parse requirement in `requirements.in` at position 0
Caused by: Expected package name starting with an alphanumeric character, found '$'
${URL}
^
"###
);
Ok(())
}
/// Resolve a dependency from a file path, passing in the entire path as an environment variable.
#[test]
fn respect_file_env_var() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ${FILE_PATH}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("FILE_PATH", context.temp_dir.join("flask-3.0.0-py3-none-any.whl")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${FILE_PATH}
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
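/// Compile a mix of editable requirements (a relative path, a `${PROJECT_ROOT}` path, and a
/// `file://` URL with an extra), alongside a regular registry dependency.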
#[test]
#[cfg(feature = "maturin")]
fn compile_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e ../../scripts/editable-installs/poetry_editable
-e ${PROJECT_ROOT}/../../scripts/editable-installs/maturin_editable
-e file://../../scripts/editable-installs/black_editable[dev]
boltons # normal dependency for comparison
"
})?;
let filter_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [(filter_path.as_str(), "requirements.in")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
-e ${PROJECT_ROOT}/../../scripts/editable-installs/maturin_editable
-e ../../scripts/editable-installs/poetry_editable
-e file://../../scripts/editable-installs/black_editable
aiohttp==3.9.0
# via black
aiosignal==1.3.1
# via aiohttp
anyio==4.0.0
# via poetry-editable
attrs==23.1.0
# via aiohttp
boltons==23.1.1
frozenlist==1.4.0
# via
# aiohttp
# aiosignal
idna==3.4
# via
# anyio
# yarl
multidict==6.0.4
# via
# aiohttp
# yarl
sniffio==1.3.0
# via anyio
uvloop==0.19.0
# via black
yarl==1.9.2
# via aiohttp
----- stderr -----
Built 3 editables in [TIME]
Resolved 14 packages in [TIME]
"###);
Ok(())
}
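/// Resolve recursive extras (an extra that itself enables other extras of the same package) for
/// a direct path requirement (`black[dev]` from a local directory).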
#[test]
fn recursive_extras_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black[dev] @ ../../scripts/editable-installs/black_editable")?;
let filter_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [(filter_path.as_str(), "requirements.in")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
let mut command = Command::new(get_bin());
if cfg!(all(windows, debug_assertions)) {
// TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
// default windows stack of 1MB
command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string());
}
uv_snapshot!(filters, command
.arg("pip")
.arg("compile")
.arg(requirements_in.path())
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", context.venv.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z
aiohttp==3.9.0
# via black
aiosignal==1.3.1
# via aiohttp
attrs==23.1.0
# via aiohttp
black @ ../../scripts/editable-installs/black_editable
frozenlist==1.4.0
# via
# aiohttp
# aiosignal
idna==3.4
# via yarl
multidict==6.0.4
# via
# aiohttp
# yarl
uvloop==0.19.0
# via black
yarl==1.9.2
# via aiohttp
----- stderr -----
Resolved 9 packages in [TIME]
"###);
Ok(())
}
/// Compile an editable package whose own dependency is declared as a direct (Git) URL requirement.
#[test]
fn compile_editable_url_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/editable-installs/hatchling_editable")?;
let filter_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [(filter_path.as_str(), "requirements.in")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
-e ../../scripts/editable-installs/hatchling_editable
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###);
Ok(())
}
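/// Corrupted or unreadable cache entries should be treated as cache misses rather than hard
/// errors: resolution should still succeed after the cache files are overwritten with garbage
/// or made unreadable.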
#[test]
#[ignore]
fn cache_errors_are_non_fatal() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
// No git dependency, since git has its own locking strategy.
requirements_in.write_str(indoc! {r"
# pypi wheel
pandas
# url wheel
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
# url source dist
werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz
"
})?;
// Pick a file from each kind of cache
let interpreter_cache = context
.cache_dir
.path()
.join("interpreter-v0")
.read_dir()?
.next()
.context("Expected a python interpreter cache file")??
.path();
let cache_files = [
PathBuf::from("simple-v0/pypi/numpy.msgpack"),
PathBuf::from(
"wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack",
),
PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"),
PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"),
interpreter_cache,
];
let check = || {
uv_snapshot!(context.compile()
.arg(requirements_in.path())
// It's sufficient to check that we resolve to a fixed number of packages.
.stdout(std::process::Stdio::null()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 13 packages in [TIME]
"###
);
};
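// The same inline snapshot is asserted several times below, so allow duplicate snapshot assertions.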
insta::allow_duplicates! {
check();
// Replace some cache files with invalid contents
for file in &cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::write(file, "I borken you cache")?;
}
check();
#[cfg(unix)]
{
use fs_err::os::unix::fs::OpenOptionsExt;
// Make some files unreadable, so that the read (rather than the deserialization) fails
for file in cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::OpenOptions::new()
.create(true)
.write(true)
.mode(0o000)
.open(file)?;
}
}
check();
Ok(())
}
}
/// Resolve a distribution from an HTML-only registry.
#[test]
#[cfg(not(target_env = "musl"))] // No musllinux wheels in the torch index
fn compile_html() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2<=3.1.2")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--index-url")
.arg("https://download.pytorch.org/whl")
.env("VIRTUAL_ENV", context.venv.as_os_str())
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR]
jinja2==3.1.2
markupsafe==2.1.3
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a distribution from a registry with and without a trailing slash.
#[test]
fn trailing_slash() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
jinja2==3.1.2
markupsafe==2.1.3
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
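// Resolve against the index URL with a trailing slash.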
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple/"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
jinja2==3.1.2
markupsafe==2.1.3
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend (default).
#[test]
fn compile_legacy_sdist_pep_517() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using `setuptools` directly.
#[test]
fn compile_legacy_sdist_setuptools() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--legacy-setup-py"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --legacy-setup-py
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes in the generated output.
#[test]
fn generate_hashes() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?;
let colorama_locked = regex::escape(indoc! {r"
colorama==0.4.6 \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
# via click
"});
let filters: Vec<_> = if cfg!(windows) {
// Remove colorama
vec![
(colorama_locked.as_str(), ""),
("Resolved 8 packages", "Resolved 7 packages"),
]
} else {
vec![]
}
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --generate-hashes
blinker==1.7.0 \
--hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \
--hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182
# via flask
click==8.1.7 \
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
# via flask
flask==3.0.0 \
--hash=sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638 \
--hash=sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58
itsdangerous==2.1.2 \
--hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
--hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a
# via flask
jinja2==3.1.2 \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
# via flask
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
--hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
--hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
--hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
--hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
--hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
--hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
--hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
--hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
--hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
--hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
--hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
--hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
--hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
--hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
--hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
--hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
--hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
--hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
--hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
--hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
--hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
--hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
--hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
--hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
--hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
--hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
--hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
--hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
--hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
--hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
--hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
--hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
--hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
--hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
--hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
--hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
--hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
--hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
--hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
--hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
--hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
--hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
--hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
--hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
--hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
--hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
--hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
--hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
--hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
--hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
--hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
# via
# jinja2
# werkzeug
werkzeug==3.0.1 \
--hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
--hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a local directory.
#[test]
fn find_links_directory() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
tqdm
numpy
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
"})?;
let project_root = fs_err::canonicalize(std::env::current_dir()?.join("..").join(".."))?;
let project_root_string = regex::escape(&project_root.user_display().to_string());
let filters: Vec<_> = [
(project_root_string.as_str(), "[PROJECT_ROOT]"),
// Unify trailing (back)slash between Windows and Unix.
(
"[PROJECT_ROOT]/scripts/wheels/",
"[PROJECT_ROOT]/scripts/wheels",
),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg(project_root.join("scripts").join("wheels")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
markupsafe==2.1.3
# via werkzeug
numpy==1.26.2
tqdm==1000.0.0
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index.
#[test]
fn find_links_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-index
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL passed via an environment variable.
#[test]
fn find_links_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm\n--find-links ${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.env("URL", "https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-index
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index,
/// with the `--find-links` URL provided via `-f` in the requirements file itself.
#[test]
fn find_links_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-f https://download.pytorch.org/whl/torch_stable.html\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--emit-find-links"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-index --emit-find-links
--find-links https://download.pytorch.org/whl/torch_stable.html
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Nothing should change.
#[test]
fn upgrade_none() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Both packages should be upgraded.
#[test]
fn upgrade_all() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--upgrade"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Only `click` should be upgraded.
#[test]
fn upgrade_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--upgrade-package")
.arg("click"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt --upgrade-package click
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Attempt to resolve a requirement at a path that doesn't exist.
#[test]
fn missing_path_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(if cfg!(windows) {
"anyio @ file://C:/tmp/anyio-3.7.0.tar.gz"
} else {
"anyio @ file:///tmp/anyio-3.7.0.tar.gz"
})?;
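// Strip the Windows drive prefix so the snapshot matches the Unix-style path in the error message.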
let filters: Vec<_> = [(r"/C:/", "/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Distribution not found at: file:///tmp/anyio-3.7.0.tar.gz
"###);
Ok(())
}
/// Attempt to resolve an editable requirement at a path that doesn't exist.
#[test]
fn missing_editable_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../tmp/anyio-3.7.0.tar.gz")?;
// File URL, absolute Unix path, or absolute Windows path.
let filters: Vec<_> = [
(r" file://.*/", " file://[TEMP_DIR]/"),
(r" /.*/", " /[TEMP_DIR]/"),
(r" [A-Z]:\\.*\\", " /[TEMP_DIR]/"),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect::<Vec<_>>();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to build editables
Caused by: Source distribution not found at: /[TEMP_DIR]/anyio-3.7.0.tar.gz
"###);
Ok(())
}
/// Attempt to resolve a URL requirement without a package name. The package name can be extracted
/// from the URL.
#[test]
fn unnamed_requirement_with_package_name() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Attempt to resolve a URL requirement without a package name. The package name can't be extracted
/// from the URL.
#[test]
fn unnamed_requirement_ambiguous() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Unable to infer package name for the unnamed requirement: https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0
"###
);
Ok(())
}
/// Exclude annotations from the output.
#[test]
fn no_annotate() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-annotate"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-annotate
black==23.10.1
click==8.1.7
mypy-extensions==1.0.0
packaging==23.2
pathspec==0.11.2
platformdirs==4.0.0
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Exclude the header from the output.
#[test]
fn no_header() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-header"), @r###"
success: true
exit_code: 0
----- stdout -----
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Include a custom compile command in the header.
#[test]
fn custom_compile_command() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--custom-compile-command")
.arg("./custom-uv-compile.sh"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# ./custom-uv-compile.sh
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
// The same custom command can also be provided via the `UV_CUSTOM_COMPILE_COMMAND` environment variable.
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("UV_CUSTOM_COMPILE_COMMAND", "./custom-uv-compile.sh"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# ./custom-uv-compile.sh
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Emit warnings when users pass redundant options from `pip-compile`.
#[test]
fn allow_unsafe() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--allow-unsafe"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --allow-unsafe
markupsafe==2.1.3
# via werkzeug
werkzeug==3.0.1
----- stderr -----
warning: pip-compile's `--allow-unsafe` has no effect (uv can safely pin `pip` and other packages).
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Error when users pass unsupported options from `pip-compile`.
#[test]
fn resolver_legacy() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolver=legacy"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: pip-compile's `--resolver=legacy` is unsupported (uv always backtracks).
"###
);
Ok(())
}
/// Emit the `--index-url` and `--extra-index-url` locations.
/// Also, preserve the `--index-url` and `--extra-index-url` flags in the command in the header.
#[test]
fn emit_index_urls() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.arg("--index-url")
.arg("https://test.pypi.org/simple/")
.arg("--extra-index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-index-url --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple
--index-url https://test.pypi.org/simple/
--extra-index-url https://pypi.org/simple
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Emit the `--find-links` locations.
#[test]
fn emit_find_links() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-find-links")
.arg("--find-links")
.arg("./"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-find-links --find-links ./
--find-links ./
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Respect the `--no-index` flag in a `requirements.txt` file.
#[test]
fn no_index_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--no-index\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the provided package locations and you
require tqdm, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because index lookups were disabled
and no additional package locations were provided (try: `--find-links
<uri>`)
"###
);
Ok(())
}
/// Prefer the `--index-url` from the command line over the `--index-url` in a `requirements.txt`
/// file. Also, `--index-url` and `--extra-index-url` should not be included in the output
/// unless we specify `--emit-index-url`.
#[test]
fn index_url_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://google.com\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
tqdm==4.66.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Raise an error when multiple `requirements.txt` files include `--index-url` flags.
#[test]
fn conflicting_index_urls_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://google.com\ntqdm")?;
let constraints_in = context.temp_dir.child("constraints.in");
constraints_in.write_str("--index-url https://wikipedia.org\nflask")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Multiple index URLs specified: `https://google.com/` vs.` https://wikipedia.org/
"###
);
Ok(())
}
/// Resolve without network access via the `--offline` flag.
#[test]
fn offline() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
// Resolve with `--offline` with an empty cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because black==23.10.1 was not found in the cache and you require
black==23.10.1, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
// Populate the cache.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
// Resolve with `--offline` with a populated cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --offline
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve without network access via the `--offline` flag, using `--find-links` for an HTML
/// registry.
#[test]
fn offline_find_links() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
// Resolve with `--offline` and `--find-links`. We indicate that the network was disabled,
// since both the `--find-links` and the registry lookups fail (but, importantly, we don't error
// when failing to fetch the `--find-links` URL).
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the cache and you require tqdm, we can
conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
// Resolve with `--offline`, `--find-links`, and `--no-index`.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html")
.arg("--no-index")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the cache and you require tqdm, we can
conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
Ok(())
}
/// Resolve nested `-r` requirements files with relative paths.
#[test]
fn compile_relative_subfile() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-r subdir/requirements.in")?;
let subdir = context.temp_dir.child("subdir");
let requirements_in = subdir.child("requirements.in");
requirements_in.write_str("-r requirements-dev.in")?;
let requirements_dev_in = subdir.child("requirements-dev.in");
requirements_dev_in.write_str("anyio")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
anyio==4.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a package with an invalid extra named `.none`.
#[test]
fn compile_none_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("entrypoints==0.3")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
entrypoints==0.3
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package (`types-pytz`) with a preference that omits a trailing zero.
///
/// See: <https://github.com/astral-sh/uv/issues/1536>
#[test]
fn compile_types_pytz() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("types-pytz")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("types-pytz==2021.1")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("-o")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in -o requirements.txt
types-pytz==2021.1.0
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` pinning that package
/// to a specific URL.
#[test]
fn compile_constraints_compatible_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio>4")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a direct URL package from a `requirements.in` file, with a `constraints.txt` file
/// constraining it to a compatible version range.
#[test]
fn compile_constraints_compatible_url_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio>4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning it to
/// a specific URL with an incompatible version.
#[test]
fn compile_constraints_incompatible_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio<4")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because only anyio>=4 is available and you require anyio<4, we can
conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` in a
/// `requirements.in` file. The resolution should fail, since the package doesn't exist at the
/// given index URL.
#[test]
fn index_url_in_requirements() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because anyio<4 was not found in the package registry and you require
anyio<4, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` passed via the
/// command line over that in a `requirements.in` file.
#[test]
fn index_url_from_command_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
anyio==3.7.1
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file with a dependency that uses an unsupported
/// scheme.
#[test]
fn unsupported_scheme() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ bzr+https://example.com/anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Unsupported scheme `bzr+https` on URL: bzr+https://example.com/anyio (Bazaar is not supported)
"###
);
Ok(())
}
/// Resolve a package with `--no-deps`, including a valid extra.
#[test]
fn no_deps_valid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[dotenv]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-deps
flask==3.0.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package with `--no-deps`, including an invalid extra.
#[test]
fn no_deps_invalid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[empty]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-deps
flask==3.0.0
----- stderr -----
Resolved 1 package in [TIME]
warning: The package `flask==3.0.0` does not have an extra named `empty`.
"###
);
Ok(())
}
/// Resolve an editable package with an invalid extra.
#[test]
fn editable_invalid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/editable-installs/black_editable[empty]")?;
let requirements_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [
(r" file://.*/", " file://[TEMP_DIR]/"),
(requirements_path.as_str(), "requirements.in"),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
-e ../../scripts/editable-installs/black_editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 1 package in [TIME]
warning: The package `black @ file://[TEMP_DIR]/black_editable` does not have an extra named `empty`.
"###);
Ok(())
}
/// Resolve a package with `--no-strip-extras`.
#[test]
fn no_strip_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[dotenv]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-strip-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-strip-extras
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask[dotenv]==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
python-dotenv==1.0.0
# via flask
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 8 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with multiple extras using `--no-strip-extras`; the extras should be merged in the output.
#[test]
#[cfg(not(windows))]
fn no_strip_extras() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio[trio]\nanyio[doc]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-strip-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --no-strip-extras
alabaster==0.7.13
# via sphinx
anyio[doc, trio]==4.0.0
attrs==23.1.0
# via
# outcome
# trio
babel==2.13.1
# via sphinx
certifi==2023.11.17
# via requests
charset-normalizer==3.3.2
# via requests
docutils==0.20.1
# via sphinx
idna==3.4
# via
# anyio
# requests
# trio
imagesize==1.4.1
# via sphinx
jinja2==3.1.2
# via sphinx
markupsafe==2.1.3
# via jinja2
outcome==1.3.0.post0
# via trio
packaging==23.2
# via
# anyio
# sphinx
pygments==2.16.1
# via sphinx
requests==2.31.0
# via sphinx
setuptools==68.2.2
# via babel
sniffio==1.3.0
# via
# anyio
# trio
snowballstemmer==2.2.0
# via sphinx
sortedcontainers==2.4.0
# via trio
sphinx==7.2.6
# via
# anyio
# sphinx-autodoc-typehints
# sphinxcontrib-applehelp
# sphinxcontrib-devhelp
# sphinxcontrib-htmlhelp
# sphinxcontrib-qthelp
# sphinxcontrib-serializinghtml
sphinx-autodoc-typehints==1.25.2
# via anyio
sphinxcontrib-applehelp==1.0.7
# via sphinx
sphinxcontrib-devhelp==1.0.5
# via sphinx
sphinxcontrib-htmlhelp==2.0.4
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.6
# via sphinx
sphinxcontrib-serializinghtml==1.1.9
# via sphinx
trio==0.23.1
# via anyio
urllib3==2.1.0
# via requests
----- stderr -----
Resolved 29 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its transitive dependencies to a specific version.
#[test]
fn compile_constraints_compatible_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("virtualenv")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
distlib==0.3.7
# via virtualenv
filelock==3.8.0
# via virtualenv
platformdirs==3.11.0
# via virtualenv
virtualenv==20.21.1
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its direct dependencies to an incompatible version.
#[test]
fn compile_constraints_incompatible_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("filelock==1.0.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and you require filelock==3.8.0, we
can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its direct dependencies to an incompatible version.
#[test]
fn conflicting_url_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("filelock==1.0.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and you require filelock==3.8.0, we
can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Override a regular package with an editable.
///
/// At present, this incorrectly resolves to the regular package.
#[test]
fn editable_override() -> Result<()> {
let context = TestContext::new("3.12");
// Add a non-editable requirement.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black")?;
// Add an editable override.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("-e file://../../scripts/editable-installs/black_editable")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --override overrides.txt
black==23.11.0
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.2
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Override an editable with a regular package.
///
/// At present, this incorrectly resolves to the editable.
#[test]
fn override_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/editable-installs/black_editable")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("black==23.10.1")?;
let requirements_path = regex::escape(&requirements_in.user_display().to_string());
let overrides_path = regex::escape(&overrides_txt.user_display().to_string());
let filters: Vec<_> = [
(requirements_path.as_str(), "requirements.in"),
(overrides_path.as_str(), "overrides.txt"),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.arg("--override")
.arg(overrides_txt.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --override overrides.txt
-e ../../scripts/editable-installs/black_editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// compatible, and together they pin the package to exactly one version.
#[test]
fn override_with_compatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio<=3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio>=3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt --override overrides.txt
anyio==3.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// incompatible, and so should error. (The correctness of this behavior is subject to debate.)
#[test]
fn override_with_incompatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio<3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio>=3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require anyio>=3.0.0 and you require anyio<3.0.0, we can
conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package, marking a dependency as unsafe.
#[test]
fn unsafe_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--unsafe-package")
.arg("jinja2")
.arg("--unsafe-package")
.arg("pydantic"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --unsafe-package jinja2 --unsafe-package pydantic
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
# The following packages were excluded from the output:
# jinja2
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound, allowing pre-releases. Per PEP 440, pre-releases
/// that match the bound (e.g., `2.0.0rc1`) should _not_ be allowed.
#[test]
fn pre_release_upper_bound_exclude() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --prerelease=allow
click==7.1.2
# via flask
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.3
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound that includes a pre-release. Per PEP 440,
/// pre-releases _should_ be allowed.
#[test]
fn pre_release_upper_bound_include() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0rc4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --prerelease=allow
click==8.1.7
# via flask
flask==2.0.0rc2
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow `--pre` as an alias for `--prerelease=allow`.
#[test]
fn pre_alias() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--pre"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --pre
click==7.1.2
# via flask
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.3
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow a pre-release for a version specifier in a constraint file.
#[test]
fn pre_release_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("flask<=2.0.0rc2")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --constraint constraints.txt
click==8.1.7
# via flask
flask==2.0.0rc2
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve from a `pyproject.toml` file with a recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "my-project"
version = "0.0.1"
dependencies = [
"tomli",
]
[project.optional-dependencies]
test = [
"pep517",
"my-project[dev]"
]
dev = [
"my-project[test]",
]
"#,
)?;
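// The `dev` and `test` extras reference each other through `my-project[...]`; the resolution should still terminate.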
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z pyproject.toml --extra dev
pep517==0.13.1
tomli==2.0.1
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// The dependencies of a local editable dependency should be considered "direct" dependencies.
#[test]
fn editable_direct_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/editable-installs/setuptools_editable")?;
let requirements_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [
(r" file://.*/", " file://[TEMP_DIR]/"),
(requirements_path.as_str(), "requirements.in"),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.arg("--resolution")
.arg("lowest-direct")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --resolution lowest-direct
-e ../../scripts/editable-installs/setuptools_editable
iniconfig==0.1
# via setuptools-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_EXTRA_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_extra_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("EXTRA_UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset", and so should be
/// overridden by an `--index-url` in a requirements file.
#[test]
fn empty_index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://test.pypi.org/simple\nidna")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// The `UV_INDEX_URL` environment variable should override an `--index-url` in a requirements file.
#[test]
fn index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://pypi.org/simple\nidna")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", "https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Expand an environment variable in a `-r` path within a `requirements.in` file.
#[test]
fn expand_env_var_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-r ${PROJECT_ROOT}/requirements-dev.in")?;
let requirements_dev_in = context.temp_dir.child("requirements-dev.in");
requirements_dev_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
anyio==4.0.0
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met.
#[test]
fn requires_python_editable() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with a `Requires-Python` constraint that is not met.
let editable_dir = TempDir::new()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Editable `example` requires Python <=3.8, but resolution targets Python 3.12.1
"###
);
Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met by the requested
/// `--python-version`.
#[test]
fn requires_python_editable_target_version() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with a `Requires-Python` constraint that is not met.
let editable_dir = TempDir::new()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;
let filters: Vec<_> = [
// 3.11 may not be installed
(
"warning: The requested Python version 3.11 is not available; .* will be used to build dependencies instead.\n",
"",
),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--python-version=3.11"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Editable `example` requires Python <=3.8, but resolution targets Python 3.11
"###
);
Ok(())
}
#[test]
fn editable_optional_url() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with an optional URL dependency.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'
[project.optional-dependencies]
dev = [
"anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .[dev]")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
-e .
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
# via example
idna==3.4
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Built 1 editable in [TIME]
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Under `--resolution=lowest-direct`, ignore optional dependencies.
///
/// In the example below, ensure that `setuptools` does not resolve to the lowest available version.
#[test]
fn editable_optional_lowest_direct() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with an optional dependency.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["setuptools-scm>=8.0.0"]
requires-python = '>=3.8'
[project.optional-dependencies]
dev = ["setuptools"]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=lowest-direct"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --resolution=lowest-direct
-e .
packaging==23.2
# via setuptools-scm
setuptools==68.2.2
# via setuptools-scm
setuptools-scm==8.0.1
# via example
----- stderr -----
Built 1 editable in [TIME]
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a source distribution that leverages Metadata 2.2.
#[test]
fn metadata_2_2() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
boltons==23.1.1
# via pyo3-mixed
pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve packages from an index that "doesn't support" zip file streaming (by way of using
/// data descriptors).
#[test]
fn no_stream() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("hashb_foxglove_protocolbuffers_python==25.3.0.1.20240226043130+465630478360")?;
let constraints_in = context.temp_dir.child("constraints.in");
constraints_in.write_str("protobuf<=5.26.0")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("-c")
.arg("constraints.in")
.arg("--extra-index-url")
.arg("https://buf.build/gen/python")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.current_dir(&context.temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in -c constraints.in --cache-dir [CACHE_DIR]
hashb-foxglove-protocolbuffers-python==25.3.0.1.20240226043130+465630478360
protobuf==5.26.0
# via hashb-foxglove-protocolbuffers-python
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Fail to resolve when a direct URL dependency's `Requires-Python` constraint is not met.
#[test]
fn requires_python_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
// Create a local package with a `Requires-Python` constraint that is not met.
let editable_dir = TempDir::new()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the current Python version (3.12.1) does not satisfy Python<=3.8
and example==0.0.0 depends on Python<=3.8, we can conclude that
example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we
can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Build an editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ${ROOT_PATH}")?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
let root_path = current_dir()?.join("../../scripts/editable-installs/root_editable");
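// The editable depends on `black` via `{root:uri}`, which should resolve to a local `file://` path.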
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.env("ROOT_PATH", root_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
-e ${ROOT_PATH}
black @ file://[TEMP_DIR]/black_editable
# via root-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request a remote wheel with a mismatched package name.
#[test]
fn requirement_wheel_name_mismatch() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("dateutil @ https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested package name `dateutil` does not match `python-dateutil` in the distribution filename: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
"###
);
Ok(())
}
/// `--generate-hashes` should not update the hashes in the "lockfile" if the package is not
/// upgraded.
#[test]
fn preserve_hashes_no_upgrade() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Avoid adding any additional hashes to the "lockfile".
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded via
/// `--upgrade`.
#[test]
fn preserve_hashes_upgrade() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe==2.1.2")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Requesting an upgrade should update the hashes, even if the version didn't change.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes")
.arg("--upgrade"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
--hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
--hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
--hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
--hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
--hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
--hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
--hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
--hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
--hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
--hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
--hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
--hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
--hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
--hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
--hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
--hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
--hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
--hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
--hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
--hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
--hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
--hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
--hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
--hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
--hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
--hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
--hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
--hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
--hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
--hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
--hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
--hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
--hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
--hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
--hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
--hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
--hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
--hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
--hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
--hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded due
/// to a change in requirements.
#[test]
fn preserve_hashes_newer_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe==2.1.3")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Requesting a more specific version should update the hashes.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
--hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
--hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
--hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
--hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
--hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
--hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
--hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
--hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
--hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
--hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
--hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
--hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
--hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
--hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
--hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
--hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
--hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
--hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
--hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
--hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
--hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
--hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
--hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
--hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
--hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
--hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
--hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
--hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
--hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
--hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
--hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
--hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
--hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
--hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
--hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
--hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
--hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
--hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
--hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
--hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
--hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
--hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
--hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
--hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
--hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
--hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
--hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
--hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
--hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
--hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
--hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Detect the package name from the various metadata sources in local directories.
#[test]
fn detect_package_name() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/editable-installs/poetry_editable
../../scripts/editable-installs/black_editable
../../scripts/editable-installs/setup_py_editable
../../scripts/editable-installs/setup_cfg_editable
"
})?;
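// None of these paths name a package explicitly; the names must be detected from each project's
// metadata (pyproject.toml, setup.py, or setup.cfg).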
let filter_path = regex::escape(&requirements_in.user_display().to_string());
let filters: Vec<_> = [(filter_path.as_str(), "requirements.in")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2023-11-18T12:00:00Z requirements.in
anyio==4.0.0
# via
# httpx
# poetry-editable
black @ ../../scripts/editable-installs/black_editable
certifi==2023.11.17
# via
# httpcore
# httpx
# requests
charset-normalizer==3.3.2
# via requests
h11==0.14.0
# via httpcore
httpcore==1.0.2
# via httpx
httpx==0.25.1
# via setup-py-editable
idna==3.4
# via
# anyio
# httpx
# requests
poetry-editable @ ../../scripts/editable-installs/poetry_editable
requests==2.31.0
# via setup-cfg-editable
setup-cfg-editable @ ../../scripts/editable-installs/setup_cfg_editable
setup-py-editable @ ../../scripts/editable-installs/setup_py_editable
sniffio==1.3.0
# via
# anyio
# httpx
urllib3==2.1.0
# via requests
----- stderr -----
Resolved 14 packages in [TIME]
"###);
Ok(())
}