Re-enable zlib-ng

Charlie Marsh 2024-01-19 19:32:29 -05:00
parent 69d2791a43
commit 305855a8a4
28 changed files with 4 additions and 16313 deletions
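(Background: flate2 selects its compression backend at build time via Cargo features, so call sites are unchanged whichever backend is compiled in. A minimal sketch of the kind of decompression code affected — illustrative only, not taken from this diff:)

use std::io::Read;
use flate2::read::GzDecoder;

// The backend (zlib-ng vs. the pure-Rust miniz_oxide) is chosen at compile
// time via Cargo features; this call site is identical either way.
fn gunzip(bytes: &[u8]) -> std::io::Result<Vec<u8>> {
    let mut decoder = GzDecoder::new(bytes);
    let mut out = Vec::new();
    decoder.read_to_end(&mut out)?;
    Ok(out)
}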

View File

@ -33,315 +33,6 @@ env:
RUSTUP_MAX_RETRIES: 10
jobs:
sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build sdist"
uses: PyO3/maturin-action@v1
with:
command: sdist
args: --out dist
- name: "Test sdist"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload sdist"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
macos-x86_64:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@v1
with:
target: x86_64
args: --release --locked --out dist
- name: "Test wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
TARGET=x86_64-apple-darwin
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256
macos-universal:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
args: --release --locked --target universal2-apple-darwin --out dist
- name: "Test wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
run: |
TARGET=aarch64-apple-darwin
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256
windows:
# TODO(charlie): Enable Windows builds.
if: false
runs-on: windows-latest
strategy:
matrix:
platform:
- target: x86_64-pc-windows-msvc
arch: x64
- target: i686-pc-windows-msvc
arch: x86
- target: aarch64-pc-windows-msvc
arch: x64
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
- name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=puffin-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/puffin.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.zip
*.sha256
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- i686-unknown-linux-gnu
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: auto
args: --release --locked --out dist
# See: https://github.com/sfackler/rust-openssl/issues/2036#issuecomment-1724324145
before-script-linux: |
# If we're running on RHEL/CentOS, install needed packages.
if command -v yum &> /dev/null; then
yum update -y && yum install -y perl-core openssl openssl-devel pkgconfig libatomic
# If we're running on i686, we need to symlink libatomic
# in order to build openssl with the -latomic flag.
if [[ ! -d "/usr/lib64" ]]; then
ln -s /usr/lib/libatomic.so.1 /usr/lib/libatomic.so
fi
else
# If we're running on a Debian-based system.
apt update -y && apt-get install -y libssl-dev openssl pkg-config
fi
- name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
${{ env.MODULE_NAME }} --help
python -m ${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.target }}
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256
linux-arm:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-gnu
arch: aarch64
# see https://github.com/astral-sh/ruff/issues/3791
# and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-gnueabihf
arch: armv7
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: 2_28
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
distro: ubuntu20.04
githubToken: ${{ github.token }}
install: |
apt-get update
apt-get install -y --no-install-recommends python3 python3-pip
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.platform.target }}
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256
# Like `linux-arm`, but use `--no-default-features --features flate2-rust_backend` when
# building Puffin.
linux-s390x:
@ -367,7 +58,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist --no-default-features --features flate2-rust_backend
args: --release --locked --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
name: Test wheel
@ -436,7 +127,7 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: auto
docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --locked --out dist --no-default-features --features flate2-rust_backend
args: --release --locked --out dist
before-script-linux: |
if command -v yum &> /dev/null; then
yum update -y
@ -483,128 +174,3 @@ jobs:
path: |
*.tar.gz
*.sha256
musllinux:
runs-on: ubuntu-latest
strategy:
matrix:
target:
- x86_64-unknown-linux-musl
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
- name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.target }}
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256
musllinux-cross:
runs-on: ubuntu-latest
strategy:
matrix:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-musleabihf
arch: armv7
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"
run: echo "# Puffin" > README.md
- name: "Build wheels"
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --locked --out dist
docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add python3
run: |
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/${{ env.MODULE_NAME }} --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist
- name: "Archive binary"
shell: bash
run: |
set -euo pipefail
TARGET=${{ matrix.platform.target }}
ARCHIVE_NAME=puffin-$TARGET
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
mkdir -p $ARCHIVE_NAME
cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin
tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
name: artifacts
path: |
*.tar.gz
*.sha256

View File

@ -1,78 +0,0 @@
name: CI
on:
push:
branches: [main]
pull_request:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PYTHON_VERSION: "3.12"
jobs:
cargo-fmt:
name: "cargo fmt"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup component add rustfmt
- name: "rustfmt"
run: cargo fmt --all --check
cargo-clippy:
name: "cargo clippy"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: |
rustup component add clippy
- uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
cargo-test:
strategy:
matrix:
os: [ubuntu-latest]
runs-on:
# We use the large GitHub actions runners for faster testing
# For Ubuntu and Windows, this requires Organization-level configuration
# See: https://docs.github.com/en/actions/using-github-hosted-runners/about-larger-runners/about-larger-runners#about-ubuntu-and-windows-larger-runners
labels: ${{ matrix.os }}-large
name: "cargo test | ${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: |
3.7
3.8
3.9
3.10
3.11
3.12
- name: "Install Rust toolchain"
run: rustup show
- uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Tests"
run: cargo nextest run --all --all-features --status-level skip --failure-output immediate-final --no-fail-fast -j 12

Cargo.lock (generated)
View File

@ -137,36 +137,6 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "assert_cmd"
version = "2.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00ad3f3a942eee60335ab4342358c161ee296829e0d16ff42fc1d6cb07815467"
dependencies = [
"anstyle",
"bstr",
"doc-comment",
"predicates",
"predicates-core",
"predicates-tree",
"wait-timeout",
]
[[package]]
name = "assert_fs"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cd762e110c8ed629b11b6cde59458cc1c71de78ebbcc30099fc8e0403a2a2ec"
dependencies = [
"anstyle",
"doc-comment",
"globwalk",
"predicates",
"predicates-core",
"predicates-tree",
"tempfile",
]
[[package]]
name = "async-compression"
version = "0.4.6"
@ -247,15 +217,6 @@ dependencies = [
"rustc-demangle",
]
[[package]]
name = "backtrace-ext"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
dependencies = [
"backtrace",
]
[[package]]
name = "base64"
version = "0.13.1"
@ -325,17 +286,6 @@ dependencies = [
"alloc-stdlib",
]
[[package]]
name = "bstr"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
dependencies = [
"memchr",
"regex-automata 0.4.3",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.14.0"
@ -763,12 +713,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "difflib"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
[[package]]
name = "digest"
version = "0.10.7"
@ -839,12 +783,6 @@ dependencies = [
"url",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "either"
version = "1.9.0"
@ -917,15 +855,6 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "float-cmp"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@ -1113,30 +1042,6 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
dependencies = [
"aho-corasick",
"bstr",
"log",
"regex-automata 0.4.3",
"regex-syntax 0.8.2",
]
[[package]]
name = "globwalk"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757"
dependencies = [
"bitflags 2.4.2",
"ignore",
"walkdir",
]
[[package]]
name = "goblin"
version = "0.8.0"
@ -1375,22 +1280,6 @@ dependencies = [
"unicode-normalization",
]
[[package]]
name = "ignore"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1"
dependencies = [
"crossbeam-deque",
"globset",
"log",
"memchr",
"regex-automata 0.4.3",
"same-file",
"walkdir",
"winapi-util",
]
[[package]]
name = "indexmap"
version = "1.9.3"
@ -1441,23 +1330,10 @@ dependencies = [
"console",
"lazy_static",
"linked-hash-map",
"regex",
"serde",
"similar",
"yaml-rust",
]
[[package]]
name = "insta-cmd"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809d3023d1d6e8d5c2206f199251f75cb26180e41f18cb0f22dd119161cb5127"
dependencies = [
"insta",
"serde",
"serde_json",
]
[[package]]
name = "install-wheel-rs"
version = "0.0.1"
@ -1526,12 +1402,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "is_ci"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb"
[[package]]
name = "itertools"
version = "0.10.5"
@ -1747,36 +1617,6 @@ dependencies = [
"autocfg",
]
[[package]]
name = "miette"
version = "5.10.0"
source = "git+https://github.com/zkat/miette.git?rev=b0744462adbbfbb6d845f382db36be883c7f3c45#b0744462adbbfbb6d845f382db36be883c7f3c45"
dependencies = [
"backtrace",
"backtrace-ext",
"is-terminal",
"miette-derive",
"once_cell",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"terminal_size",
"textwrap",
"thiserror",
"unicode-width",
]
[[package]]
name = "miette-derive"
version = "5.10.0"
source = "git+https://github.com/zkat/miette.git?rev=b0744462adbbfbb6d845f382db36be883c7f3c45#b0744462adbbfbb6d845f382db36be883c7f3c45"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.48",
]
[[package]]
name = "mimalloc"
version = "0.1.39"
@ -1831,12 +1671,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "normalize-line-endings"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
@ -1847,15 +1681,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "nu-ansi-term"
version = "0.49.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68"
dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "num-traits"
version = "0.2.17"
@ -2192,36 +2017,6 @@ version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "predicates"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68b87bfd4605926cdfefc1c3b5f8fe560e3feca9d5552cf68c466d3d8236c7e8"
dependencies = [
"anstyle",
"difflib",
"float-cmp",
"normalize-line-endings",
"predicates-core",
"regex",
]
[[package]]
name = "predicates-core"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174"
[[package]]
name = "predicates-tree"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf"
dependencies = [
"predicates-core",
"termtree",
]
[[package]]
name = "priority-queue"
version = "1.3.2"
@ -2257,64 +2052,7 @@ dependencies = [
name = "puffin"
version = "0.0.3"
dependencies = [
"anstream",
"anyhow",
"assert_cmd",
"assert_fs",
"bitflags 2.4.2",
"chrono",
"clap",
"distribution-filename",
"distribution-types",
"flate2",
"fs-err",
"futures",
"gourgeist",
"indicatif",
"indoc",
"insta",
"insta-cmd",
"install-wheel-rs",
"itertools 0.12.0",
"miette",
"mimalloc",
"owo-colors",
"pep440_rs 0.3.12",
"pep508_rs",
"platform-host",
"platform-tags",
"predicates",
"pubgrub",
"puffin-build",
"puffin-cache",
"puffin-client",
"puffin-dispatch",
"puffin-distribution",
"puffin-installer",
"puffin-interpreter",
"puffin-normalize",
"puffin-resolver",
"puffin-traits",
"puffin-warnings",
"puffin-workspace",
"pypi-types",
"pyproject-toml",
"requirements-txt",
"reqwest",
"rustc-hash",
"tempfile",
"textwrap",
"thiserror",
"tikv-jemallocator",
"tokio",
"toml",
"tracing",
"tracing-durations-export",
"tracing-subscriber",
"tracing-tree",
"url",
"waitmap",
"which",
]
[[package]]
@ -3363,12 +3101,6 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e"
[[package]]
name = "smawk"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
[[package]]
name = "socket2"
version = "0.5.5"
@ -3391,34 +3123,6 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "supports-color"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89"
dependencies = [
"is-terminal",
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84231692eb0d4d41e4cdd0cabfdd2e6cd9e255e65f80c9aa7c98dd502b4233d"
dependencies = [
"is-terminal",
]
[[package]]
name = "supports-unicode"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b6c2cb240ab5dd21ed4906895ee23fe5a48acdbd15a3ce388e7b62a9b66baf7"
dependencies = [
"is-terminal",
]
[[package]]
name = "svg"
version = "0.14.0"
@ -3507,22 +3211,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "terminal_size"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "termtree"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
[[package]]
name = "test-case"
version = "3.3.1"
@ -3565,17 +3253,6 @@ dependencies = [
"log",
]
[[package]]
name = "textwrap"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7b3e525a49ec206798b40326a44121291b530c963cfb01018f63e135bac543d"
dependencies = [
"smawk",
"unicode-linebreak",
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.56"
@ -3882,7 +3559,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"matchers",
"nu-ansi-term 0.46.0",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
@ -3893,18 +3570,6 @@ dependencies = [
"tracing-log",
]
[[package]]
name = "tracing-tree"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675"
dependencies = [
"nu-ansi-term 0.49.0",
"tracing-core",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "try-lock"
version = "0.2.5"
@ -3938,12 +3603,6 @@ version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-normalization"
version = "0.1.22"
@ -4052,15 +3711,6 @@ dependencies = [
"quote",
]
[[package]]
name = "wait-timeout"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
dependencies = [
"libc",
]
[[package]]
name = "waitmap"
version = "1.1.0"

View File

@ -14,76 +14,12 @@ default-run = "puffin"
workspace = true
[dependencies]
distribution-filename = { path = "../distribution-filename" }
distribution-types = { path = "../distribution-types" }
gourgeist = { path = "../gourgeist" }
install-wheel-rs = { path = "../install-wheel-rs", default-features = false }
pep440_rs = { path = "../pep440-rs" }
pep508_rs = { path = "../pep508-rs" }
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
puffin-build = { path = "../puffin-build" }
puffin-cache = { path = "../puffin-cache", features = ["clap"] }
puffin-client = { path = "../puffin-client" }
puffin-dispatch = { path = "../puffin-dispatch" }
puffin-distribution = { path = "../puffin-distribution" }
puffin-installer = { path = "../puffin-installer" }
puffin-interpreter = { path = "../puffin-interpreter" }
puffin-normalize = { path = "../puffin-normalize" }
puffin-resolver = { path = "../puffin-resolver", features = ["clap"] }
puffin-traits = { path = "../puffin-traits" }
puffin-warnings = { path = "../puffin-warnings" }
puffin-workspace = { path = "../puffin-workspace" }
pypi-types = { path = "../pypi-types" }
requirements-txt = { path = "../requirements-txt" }
# This tells flate2 (and all libraries that depend on it, including async_compression
# and async_zip) to use zlib-ng, which is about 2x faster than the default flate2 backend
# at decompression. See https://github.com/rust-lang/flate2-rs#backends
flate2 = { workspace = true, default-features = false }
anstream = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive"] }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
indicatif = { workspace = true }
itertools = { workspace = true }
miette = { workspace = true, features = ["fancy"] }
owo-colors = { workspace = true }
pubgrub = { workspace = true }
pyproject-toml = { workspace = true }
rustc-hash = { workspace = true }
tempfile = { workspace = true }
textwrap = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
tracing-durations-export = { workspace = true, features = ["plot"], optional = true }
tracing-subscriber = { workspace = true }
tracing-tree = { workspace = true }
url = { workspace = true }
waitmap = { workspace = true }
which = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = "0.1.39"
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies]
tikv-jemallocator = "0.5.4"
[dev-dependencies]
assert_cmd = { version = "2.0.12" }
assert_fs = { version = "1.1.0" }
indoc = { version = "2.0.4" }
insta-cmd = { version = "0.4.0" }
insta = { version = "1.34.0", features = ["filters"] }
predicates = { version = "3.0.4" }
reqwest = { version = "0.11.23", features = ["blocking", "rustls"], default-features = false }
[features]
default = ["flate2-zlib-ng"]
# Introduces a dependency on a local Python installation.
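Given the default = ["flate2-zlib-ng"] feature above, code in this crate can report which backend was compiled in. A minimal sketch, assuming only the feature name visible in this manifest:

// Sketch: reports the compiled-in flate2 backend. `flate2-zlib-ng` is the
// feature named in the [features] table above; everything else is assumed.
fn flate2_backend() -> &'static str {
    if cfg!(feature = "flate2-zlib-ng") {
        "zlib-ng"
    } else {
        "rust_backend (miniz_oxide)"
    }
}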

View File

@ -1,74 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use miette::{Diagnostic, IntoDiagnostic};
use thiserror::Error;
use tracing::info;
use puffin_workspace::WorkspaceError;
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Add a dependency to the workspace.
#[allow(clippy::unnecessary_wraps)]
pub(crate) fn add(name: &str, _printer: Printer) -> Result<ExitStatus> {
match add_impl(name) {
Ok(status) => Ok(status),
Err(err) => {
#[allow(clippy::print_stderr)]
{
eprint!("{err:?}");
}
Ok(ExitStatus::Failure)
}
}
}
#[derive(Error, Debug, Diagnostic)]
enum AddError {
#[error(
"Could not find a `pyproject.toml` file in the current directory or any of its parents"
)]
#[diagnostic(code(puffin::add::workspace_not_found))]
WorkspaceNotFound,
#[error("Failed to parse requirement: `{0}`")]
#[diagnostic(code(puffin::add::invalid_requirement))]
InvalidRequirement(String, #[source] pep508_rs::Pep508Error),
#[error("Failed to parse `pyproject.toml` at: `{0}`")]
#[diagnostic(code(puffin::add::parse))]
ParseError(PathBuf, #[source] WorkspaceError),
#[error("Failed to write `pyproject.toml` to: `{0}`")]
#[diagnostic(code(puffin::add::write))]
WriteError(PathBuf, #[source] WorkspaceError),
}
fn add_impl(name: &str) -> miette::Result<ExitStatus> {
let requirement = puffin_workspace::VerbatimRequirement::try_from(name)
.map_err(|err| AddError::InvalidRequirement(name.to_string(), err))?;
// Locate the workspace.
let cwd = std::env::current_dir().into_diagnostic()?;
let Some(workspace_root) = puffin_workspace::find_pyproject_toml(cwd) else {
return Err(AddError::WorkspaceNotFound.into());
};
info!("Found workspace at: {}", workspace_root.display());
// Parse the manifest.
let mut manifest = puffin_workspace::Workspace::try_from(workspace_root.as_path())
.map_err(|err| AddError::ParseError(workspace_root.clone(), err))?;
// Add the dependency.
manifest.add_dependency(&requirement);
// Write the manifest back to disk.
manifest
.save(&workspace_root)
.map_err(|err| AddError::WriteError(workspace_root.clone(), err))?;
Ok(ExitStatus::Success)
}

View File

@ -1,52 +0,0 @@
use std::fmt::Write;
use anyhow::{Context, Result};
use fs_err as fs;
use owo_colors::OwoColorize;
use puffin_cache::Cache;
use puffin_normalize::PackageName;
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Clear the cache.
pub(crate) fn clean(
cache: &Cache,
packages: &[PackageName],
mut printer: Printer,
) -> Result<ExitStatus> {
if !cache.root().exists() {
writeln!(
printer,
"No cache found at: {}",
cache.root().display().cyan()
)?;
return Ok(ExitStatus::Success);
}
if packages.is_empty() {
writeln!(
printer,
"Clearing cache at: {}",
cache.root().display().cyan()
)?;
fs::remove_dir_all(cache.root())
.with_context(|| format!("Failed to clear cache at: {}", cache.root().display()))?;
} else {
for package in packages {
let count = cache.purge(package)?;
match count {
0 => writeln!(printer, "No entries found for package: {}", package.cyan())?,
1 => writeln!(printer, "Cleared 1 entry for package: {}", package.cyan())?,
count => writeln!(
printer,
"Cleared {count} entries for package: {}",
package.cyan()
)?,
}
}
}
Ok(ExitStatus::Success)
}

View File

@ -1,53 +0,0 @@
use std::fmt::Write;
use anyhow::Result;
use itertools::Itertools;
use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::Name;
use platform_host::Platform;
use puffin_cache::Cache;
use puffin_installer::SitePackages;
use puffin_interpreter::Virtualenv;
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Enumerate the installed packages in the current environment.
pub(crate) fn freeze(cache: &Cache, strict: bool, mut printer: Printer) -> Result<ExitStatus> {
// Detect the current Python interpreter.
let platform = Platform::current()?;
let python = Virtualenv::from_env(platform, cache)?;
debug!(
"Using Python interpreter: {}",
python.python_executable().display()
);
// Build the installed index.
let site_packages = SitePackages::from_executable(&python)?;
for dist in site_packages
.iter()
.sorted_unstable_by(|a, b| a.name().cmp(b.name()))
{
#[allow(clippy::print_stdout)]
{
println!("{dist}");
}
}
// Validate that the environment is consistent.
if strict {
for diagnostic in site_packages.diagnostics()? {
writeln!(
printer,
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
Ok(ExitStatus::Success)
}

View File

@ -1,76 +0,0 @@
use std::process::ExitCode;
use std::time::Duration;
pub(crate) use add::add;
pub(crate) use clean::clean;
use distribution_types::InstalledMetadata;
pub(crate) use freeze::freeze;
pub(crate) use pip_compile::{extra_name_with_clap_error, pip_compile, Upgrade};
pub(crate) use pip_install::pip_install;
pub(crate) use pip_sync::pip_sync;
pub(crate) use pip_uninstall::pip_uninstall;
pub(crate) use remove::remove;
pub(crate) use venv::venv;
mod add;
mod clean;
mod freeze;
mod pip_compile;
mod pip_install;
mod pip_sync;
mod pip_uninstall;
mod remove;
mod reporters;
mod venv;
#[derive(Copy, Clone)]
pub(crate) enum ExitStatus {
/// The command succeeded.
#[allow(unused)]
Success,
/// The command failed due to an error in the user input.
#[allow(unused)]
Failure,
/// The command failed with an unexpected error.
#[allow(unused)]
Error,
}
impl From<ExitStatus> for ExitCode {
fn from(status: ExitStatus) -> Self {
match status {
ExitStatus::Success => ExitCode::from(0),
ExitStatus::Failure => ExitCode::from(1),
ExitStatus::Error => ExitCode::from(2),
}
}
}
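A sketch of how this conversion is typically consumed at the program's entry point (illustrative; not part of this diff):

// `main` may return `ExitCode` directly, so the enum converts at the boundary.
fn main() -> std::process::ExitCode {
    let status = ExitStatus::Success; // ...the result of running the CLI...
    status.into()
}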
/// Format a duration as a human-readable string, Cargo-style.
pub(super) fn elapsed(duration: Duration) -> String {
let secs = duration.as_secs();
if secs >= 60 {
format!("{}m {:02}s", secs / 60, secs % 60)
} else if secs > 0 {
format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
} else {
format!("{}ms", duration.subsec_millis())
}
}
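A few illustrative input/output pairs for `elapsed`, following the branches above:

use std::time::Duration;

assert_eq!(elapsed(Duration::from_millis(42)), "42ms");     // sub-second
assert_eq!(elapsed(Duration::from_millis(1_234)), "1.23s"); // seconds + centiseconds
assert_eq!(elapsed(Duration::from_secs(75)), "1m 15s");     // minutes, zero-padded seconds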
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(super) enum ChangeEventKind {
/// The package was removed from the environment.
Removed,
/// The package was added to the environment.
Added,
}
#[derive(Debug)]
pub(super) struct ChangeEvent<T: InstalledMetadata> {
dist: T,
kind: ChangeEventKind,
}

View File

@ -1,379 +0,0 @@
use std::borrow::Cow;
use std::env;
use std::fmt::Write;
use std::io::stdout;
use std::ops::Deref;
use std::path::Path;
use std::str::FromStr;
use anstream::AutoStream;
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use owo_colors::OwoColorize;
use rustc_hash::FxHashSet;
use tempfile::tempdir_in;
use tracing::debug;
use distribution_types::{IndexLocations, LocalEditable};
use pep508_rs::Requirement;
use platform_host::Platform;
use platform_tags::Tags;
use puffin_cache::Cache;
use puffin_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_installer::{Downloader, NoBinary};
use puffin_interpreter::{Interpreter, PythonVersion};
use puffin_normalize::{ExtraName, PackageName};
use puffin_resolver::{
DisplayResolutionGraph, InMemoryIndex, Manifest, PreReleaseMode, ResolutionMode,
ResolutionOptions, Resolver,
};
use puffin_traits::{InFlight, SetupPyStrategy};
use requirements_txt::EditableRequirement;
use crate::commands::reporters::{DownloadReporter, ResolverReporter};
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification};
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Resolve a set of requirements into a set of pinned versions.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn pip_compile(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: ExtrasSpecification<'_>,
output_file: Option<&Path>,
resolution_mode: ResolutionMode,
prerelease_mode: PreReleaseMode,
upgrade: Upgrade,
generate_hashes: bool,
index_locations: IndexLocations,
setup_py: SetupPyStrategy,
no_build: bool,
python_version: Option<PythonVersion>,
exclude_newer: Option<DateTime<Utc>>,
cache: Cache,
mut printer: Printer,
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// If the user requests `extras` but does not provide a pyproject.toml source, exit with an error.
if !matches!(extras, ExtrasSpecification::None)
&& !requirements
.iter()
.any(|source| matches!(source, RequirementsSource::PyprojectToml(_)))
{
return Err(anyhow!(
"Requesting extras requires a pyproject.toml input file."
));
}
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
requirements,
constraints,
overrides,
editables,
extras: used_extras,
} = RequirementsSpecification::from_sources(requirements, constraints, overrides, &extras)?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !used_extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
));
}
}
let preferences: Vec<Requirement> = output_file
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
.filter(|_| !upgrade.is_all())
.filter(|output_file| output_file.exists())
.map(Path::to_path_buf)
.map(RequirementsSource::from)
.as_ref()
.map(|source| RequirementsSpecification::from_source(source, &extras))
.transpose()?
.map(|spec| spec.requirements)
.map(|requirements| match upgrade {
// Respect all pinned versions from the existing lockfile.
Upgrade::None => requirements,
// Ignore all pinned versions from the existing lockfile.
Upgrade::All => vec![],
// Ignore pinned versions for the specified packages.
Upgrade::Packages(packages) => requirements
.into_iter()
.filter(|requirement| !packages.contains(&requirement.name))
.collect(),
})
.unwrap_or_default();
// Detect the current Python interpreter.
let platform = Platform::current()?;
let interpreter = Interpreter::find(python_version.as_ref(), platform, &cache)?;
debug!(
"Using Python {} at {}",
interpreter.markers().python_version,
interpreter.sys_executable().display()
);
// Create a shared in-memory index.
let source_index = InMemoryIndex::default();
// If we're resolving against a different Python version, use a separate index. Source
// distributions will be built against the installed version, and so the index may contain
// different package priorities than in the top-level resolution.
let top_level_index = if python_version.is_some() {
InMemoryIndexRef::Owned(InMemoryIndex::default())
} else {
InMemoryIndexRef::Borrowed(&source_index)
};
// Determine the tags, markers, and interpreter to use for resolution.
let tags = if let Some(python_version) = python_version.as_ref() {
Cow::Owned(Tags::from_env(
interpreter.platform(),
python_version.simple_version(),
)?)
} else {
Cow::Borrowed(interpreter.tags()?)
};
let markers = python_version.map_or_else(
|| Cow::Borrowed(interpreter.markers()),
|python_version| Cow::Owned(python_version.markers(interpreter.markers())),
);
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.build();
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_indexes()).await?;
FlatIndex::from_entries(entries, &tags)
};
// Track in-flight downloads, builds, etc., across resolutions.
let in_flight = InFlight::default();
let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer);
let build_dispatch = BuildDispatch::new(
&client,
&cache,
&interpreter,
&index_locations,
&flat_index,
&source_index,
&in_flight,
interpreter.sys_executable().to_path_buf(),
setup_py,
no_build,
&NoBinary::None,
)
.with_options(options);
// Build the editables and add their requirements
let editable_metadata = if editables.is_empty() {
Vec::new()
} else {
let start = std::time::Instant::now();
let editables: Vec<LocalEditable> = editables
.into_iter()
.map(|editable| {
let EditableRequirement { path, url } = editable;
Ok(LocalEditable { url, path })
})
.collect::<Result<_>>()?;
let downloader = Downloader::new(&cache, &tags, &client, &build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
let editable_wheel_dir = tempdir_in(cache.root())?;
let editable_metadata: Vec<_> = downloader
.build_editables(editables, editable_wheel_dir.path())
.await
.context("Failed to build editables")?
.into_iter()
.map(|built_editable| (built_editable.editable, built_editable.metadata))
.collect();
let s = if editable_metadata.len() == 1 {
""
} else {
"s"
};
writeln!(
printer,
"{}",
format!(
"Built {} in {}",
format!("{} editable{}", editable_metadata.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
editable_metadata
};
// Create a manifest of the requirements.
let manifest = Manifest::new(
requirements,
constraints,
overrides,
preferences,
project,
editable_metadata,
);
// Resolve the dependencies.
let resolver = Resolver::new(
manifest,
options,
&markers,
&interpreter,
&tags,
&client,
&flat_index,
&top_level_index,
&build_dispatch,
)
.with_reporter(ResolverReporter::from(printer));
let resolution = match resolver.resolve().await {
Err(puffin_resolver::ResolveError::NoSolution(err)) => {
#[allow(clippy::print_stderr)]
{
let report = miette::Report::msg(format!("{err}"))
.context("No solution found when resolving dependencies:");
eprint!("{report:?}");
}
return Ok(ExitStatus::Failure);
}
result => result,
}?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
// Notify the user of any diagnostics.
for diagnostic in resolution.diagnostics() {
writeln!(
printer,
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
// Write the resolved dependencies to the output channel.
let mut writer: Box<dyn std::io::Write> = if let Some(output_file) = output_file {
Box::new(AutoStream::<std::fs::File>::auto(
fs_err::File::create(output_file)?.into(),
))
} else {
Box::new(AutoStream::auto(stdout()))
};
writeln!(
writer,
"{}",
format!("# This file was autogenerated by Puffin v{VERSION} via the following command:")
.green()
)?;
writeln!(
writer,
"{}",
format!("# puffin {}", env::args().skip(1).join(" ")).green()
)?;
write!(
writer,
"{}",
DisplayResolutionGraph::new(&resolution, generate_hashes)
)?;
Ok(ExitStatus::Success)
}
/// Whether to allow package upgrades.
#[derive(Debug)]
pub(crate) enum Upgrade {
/// Prefer pinned versions from the existing lockfile, if possible.
None,
/// Allow package upgrades for all packages, ignoring the existing lockfile.
All,
/// Allow package upgrades, but only for the specified packages.
Packages(FxHashSet<PackageName>),
}
impl Upgrade {
/// Determine the upgrade strategy from the command-line arguments.
pub(crate) fn from_args(upgrade: bool, upgrade_package: Vec<PackageName>) -> Self {
if upgrade {
Self::All
} else if !upgrade_package.is_empty() {
Self::Packages(upgrade_package.into_iter().collect())
} else {
Self::None
}
}
/// Returns `true` if all packages should be upgraded.
pub(crate) fn is_all(&self) -> bool {
matches!(self, Self::All)
}
}
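The resulting mapping, illustrated (a sketch; the `--upgrade` and `--upgrade-package` flag spellings are assumptions inferred from the parameter names):

// `upgrade: true` (e.g. `--upgrade`) wins regardless of any listed packages.
assert!(matches!(Upgrade::from_args(true, vec![]), Upgrade::All));
// Neither flag set: respect pinned versions from the existing lockfile.
assert!(matches!(Upgrade::from_args(false, vec![]), Upgrade::None));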
pub(crate) fn extra_name_with_clap_error(arg: &str) -> Result<ExtraName> {
ExtraName::from_str(arg).map_err(|_err| {
anyhow!(
"Extra names must start and end with a letter or digit and may only \
contain -, _, ., and alphanumeric characters"
)
})
}
/// An owned or unowned [`InMemoryIndex`].
enum InMemoryIndexRef<'a> {
Owned(InMemoryIndex),
Borrowed(&'a InMemoryIndex),
}
impl Deref for InMemoryIndexRef<'_> {
type Target = InMemoryIndex;
fn deref(&self) -> &Self::Target {
match self {
Self::Owned(index) => index,
Self::Borrowed(index) => index,
}
}
}
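Through `Deref`, both variants expose the same `InMemoryIndex` API, and deref coercion lets either be used where `&InMemoryIndex` is expected (a sketch):

fn inspect(index: &InMemoryIndexRef<'_>) -> &InMemoryIndex {
    index // `&InMemoryIndexRef<'_>` coerces to `&InMemoryIndex` via `Deref`
}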

View File

@ -1,659 +0,0 @@
use std::fmt::Write;
use std::path::Path;
use anstream::eprint;
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use owo_colors::OwoColorize;
use tempfile::tempdir_in;
use tracing::debug;
use distribution_types::{
IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name, Resolution,
};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::{MarkerEnvironment, Requirement};
use platform_host::Platform;
use platform_tags::Tags;
use puffin_cache::Cache;
use puffin_client::{FlatIndex, FlatIndexClient, RegistryClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_installer::{
BuiltEditable, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages,
};
use puffin_interpreter::{Interpreter, Virtualenv};
use puffin_normalize::PackageName;
use puffin_resolver::{
InMemoryIndex, Manifest, PreReleaseMode, ResolutionGraph, ResolutionMode, ResolutionOptions,
Resolver,
};
use puffin_traits::{InFlight, SetupPyStrategy};
use requirements_txt::EditableRequirement;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification};
/// Install packages into the current environment.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn pip_install(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
resolution_mode: ResolutionMode,
prerelease_mode: PreReleaseMode,
index_locations: IndexLocations,
reinstall: &Reinstall,
link_mode: LinkMode,
setup_py: SetupPyStrategy,
no_build: bool,
no_binary: &NoBinary,
strict: bool,
exclude_newer: Option<DateTime<Utc>>,
cache: Cache,
mut printer: Printer,
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
requirements,
constraints,
overrides,
editables,
extras: used_extras,
} = specification(requirements, constraints, overrides, extras)?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !used_extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
));
}
}
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
debug!(
"Using Python interpreter: {}",
venv.python_executable().display()
);
let _lock = venv.lock()?;
// Determine the set of installed packages.
let site_packages =
SitePackages::from_executable(&venv).context("Failed to list installed packages")?;
// If the requirements are already satisfied, we're done. Ideally, the resolver would be fast
// enough to let us remove this check. But right now, for large environments, it's an order of
// magnitude faster to validate the environment than to resolve the requirements.
if reinstall.is_none() && site_packages.satisfies(&requirements, &editables, &constraints)? {
let num_requirements = requirements.len() + editables.len();
let s = if num_requirements == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Audited {} in {}",
format!("{num_requirements} package{s}").bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(ExitStatus::Success);
}
// Determine the tags, markers, and interpreter to use for resolution.
let interpreter = venv.interpreter().clone();
let tags = venv.interpreter().tags()?;
let markers = venv.interpreter().markers();
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.build();
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_indexes()).await?;
FlatIndex::from_entries(entries, tags)
};
// Create a shared in-memory index.
let index = InMemoryIndex::default();
// Track in-flight downloads, builds, etc., across resolutions.
let in_flight = InFlight::default();
let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer);
let resolve_dispatch = BuildDispatch::new(
&client,
&cache,
&interpreter,
&index_locations,
&flat_index,
&index,
&in_flight,
venv.python_executable(),
setup_py,
no_build,
no_binary,
)
.with_options(options);
// Build all editable distributions. The editables are shared between resolution and
// installation, and should live for the duration of the command. If an editable is already
// installed in the environment, we'll still re-build it here.
let editable_wheel_dir;
let editables = if editables.is_empty() {
vec![]
} else {
editable_wheel_dir = tempdir_in(venv.root())?;
build_editables(
&editables,
editable_wheel_dir.path(),
&cache,
tags,
&client,
&resolve_dispatch,
printer,
)
.await?
};
// Resolve the requirements.
let resolution = match resolve(
requirements,
constraints,
overrides,
project,
&editables,
&site_packages,
reinstall,
&interpreter,
tags,
markers,
&client,
&flat_index,
&index,
&resolve_dispatch,
options,
printer,
)
.await
{
Ok(resolution) => Resolution::from(resolution),
Err(Error::Resolve(puffin_resolver::ResolveError::NoSolution(err))) => {
#[allow(clippy::print_stderr)]
{
let report = miette::Report::msg(format!("{err}"))
.context("No solution found when resolving dependencies:");
eprint!("{report:?}");
}
return Ok(ExitStatus::Failure);
}
Err(err) => return Err(err.into()),
};
// Re-initialize the in-flight map.
let in_flight = InFlight::default();
// If we're running with `--reinstall`, initialize a separate `BuildDispatch`, since we may
// end up removing some distributions from the environment.
let install_dispatch = if reinstall.is_none() {
resolve_dispatch
} else {
BuildDispatch::new(
&client,
&cache,
&interpreter,
&index_locations,
&flat_index,
&index,
&in_flight,
venv.python_executable(),
setup_py,
no_build,
no_binary,
)
};
// Sync the environment.
install(
&resolution,
editables,
site_packages,
reinstall,
no_binary,
link_mode,
&index_locations,
tags,
&client,
&in_flight,
&install_dispatch,
&cache,
&venv,
printer,
)
.await?;
// Validate the environment.
if strict {
validate(&resolution, &venv, printer)?;
}
Ok(ExitStatus::Success)
}
/// Consolidate the requirements for an installation.
fn specification(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
) -> Result<RequirementsSpecification, Error> {
// If the user requests `extras` but does not provide a pyproject.toml source, exit with an error.
if !matches!(extras, ExtrasSpecification::None)
&& !requirements
.iter()
.any(|source| matches!(source, RequirementsSource::PyprojectToml(_)))
{
return Err(anyhow!("Requesting extras requires a pyproject.toml input file.").into());
}
// Read all requirements from the provided sources.
let spec =
RequirementsSpecification::from_sources(requirements, constraints, overrides, extras)?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !spec.extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
)
.into());
}
}
Ok(spec)
}
/// Build a set of editable distributions.
async fn build_editables(
editables: &[EditableRequirement],
editable_wheel_dir: &Path,
cache: &Cache,
tags: &Tags,
client: &RegistryClient,
build_dispatch: &BuildDispatch<'_>,
mut printer: Printer,
) -> Result<Vec<BuiltEditable>, Error> {
let start = std::time::Instant::now();
let downloader = Downloader::new(cache, tags, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
let editables: Vec<LocalEditable> = editables
.iter()
.map(|editable| {
let EditableRequirement { path, url } = editable;
Ok(LocalEditable {
path: path.clone(),
url: url.clone(),
})
})
.collect::<Result<_>>()?;
let editables: Vec<_> = downloader
.build_editables(editables, editable_wheel_dir)
.await
.context("Failed to build editables")?
.into_iter()
.collect();
let s = if editables.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Built {} in {}",
format!("{} editable{}", editables.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
Ok(editables)
}
/// Resolve a set of requirements, similar to running `pip compile`.
#[allow(clippy::too_many_arguments)]
async fn resolve(
requirements: Vec<Requirement>,
constraints: Vec<Requirement>,
overrides: Vec<Requirement>,
project: Option<PackageName>,
editables: &[BuiltEditable],
site_packages: &SitePackages<'_>,
reinstall: &Reinstall,
interpreter: &Interpreter,
tags: &Tags,
markers: &MarkerEnvironment,
client: &RegistryClient,
flat_index: &FlatIndex,
index: &InMemoryIndex,
build_dispatch: &BuildDispatch<'_>,
options: ResolutionOptions,
mut printer: Printer,
) -> Result<ResolutionGraph, Error> {
let start = std::time::Instant::now();
// Respect preferences from the existing environments.
let preferences: Vec<Requirement> = match reinstall {
Reinstall::All => vec![],
Reinstall::None => site_packages.requirements().collect(),
Reinstall::Packages(packages) => site_packages
.requirements()
.filter(|requirement| !packages.contains(&requirement.name))
.collect(),
};
// Map the editables to their metadata.
let editables = editables
.iter()
.map(|built_editable| {
(
built_editable.editable.clone(),
built_editable.metadata.clone(),
)
})
.collect();
// Create a manifest of the requirements.
let manifest = Manifest::new(
requirements,
constraints,
overrides,
preferences,
project,
editables,
);
// Resolve the dependencies.
let resolver = Resolver::new(
manifest,
options,
markers,
interpreter,
tags,
client,
flat_index,
index,
build_dispatch,
)
.with_reporter(ResolverReporter::from(printer));
let resolution = resolver.resolve().await?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
Ok(resolution)
}
/// Install a set of requirements into the current environment.
#[allow(clippy::too_many_arguments)]
async fn install(
resolution: &Resolution,
built_editables: Vec<BuiltEditable>,
site_packages: SitePackages<'_>,
reinstall: &Reinstall,
no_binary: &NoBinary,
link_mode: LinkMode,
index_urls: &IndexLocations,
tags: &Tags,
client: &RegistryClient,
in_flight: &InFlight,
build_dispatch: &BuildDispatch<'_>,
cache: &Cache,
venv: &Virtualenv,
mut printer: Printer,
) -> Result<(), Error> {
let start = std::time::Instant::now();
// Partition into those that should be linked from the cache (`local`), those that need to be
// downloaded (`remote`), and those that should be removed (`extraneous`).
let requirements = resolution.requirements();
let editables = built_editables
.into_iter()
.map(ResolvedEditable::Built)
.collect::<Vec<_>>();
let Plan {
local,
remote,
reinstalls,
extraneous: _,
} = Planner::with_requirements(&requirements)
.with_editable_requirements(editables)
.build(
site_packages,
reinstall,
no_binary,
index_urls,
cache,
venv,
tags,
)
.context("Failed to determine installation plan")?;
// Nothing to do.
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(());
}
// Map any registry-based requirements back to those returned by the resolver.
let remote = remote
.iter()
.map(|dist| {
resolution
.get(&dist.name)
.cloned()
.expect("Resolution should contain all packages")
})
.collect::<Vec<_>>();
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let start = std::time::Instant::now();
let downloader = Downloader::new(cache, tags, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader
.download(remote, in_flight)
.await
.context("Failed to download distributions")?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Downloaded {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
wheels
};
// Remove any existing installations.
if !reinstalls.is_empty() {
for dist_info in &reinstalls {
let summary = puffin_installer::uninstall(dist_info).await?;
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
puffin_installer::Installer::new(venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
for event in reinstalls
.into_iter()
.map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer,
" {} {}{}",
"+".green(),
event.dist.name().as_ref().white().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer,
" {} {}{}",
"-".red(),
event.dist.name().as_ref().white().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
}
}
Ok(())
}
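// Sketch (tuples stand in for `ChangeEvent`; the numeric kind encoding is an
// assumption for illustration): the report above sorts by package name first
// and by kind second, so both lines for a reinstalled package appear together.
#[cfg(test)]
mod change_event_order_sketch {
    #[test]
    fn sorts_by_name_then_kind() {
        // (name, kind): 0 = removed, 1 = added (hypothetical encoding).
        let mut events = vec![("flask", 1), ("flask", 0), ("click", 1)];
        events.sort_unstable_by(|a, b| a.0.cmp(&b.0).then_with(|| a.1.cmp(&b.1)));
        assert_eq!(events, [("click", 1), ("flask", 0), ("flask", 1)]);
    }
}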
/// Validate the installed packages in the virtual environment.
fn validate(resolution: &Resolution, venv: &Virtualenv, mut printer: Printer) -> Result<(), Error> {
let site_packages = SitePackages::from_executable(venv)?;
let diagnostics = site_packages.diagnostics()?;
for diagnostic in diagnostics {
// Only surface diagnostics that are "relevant" to the current resolution.
if resolution
.packages()
.any(|package| diagnostic.includes(package))
{
writeln!(
printer,
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
Ok(())
}
#[derive(thiserror::Error, Debug)]
enum Error {
#[error(transparent)]
Resolve(#[from] puffin_resolver::ResolveError),
#[error(transparent)]
Client(#[from] puffin_client::Error),
#[error(transparent)]
Platform(#[from] platform_host::PlatformError),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
Fmt(#[from] std::fmt::Error),
#[error(transparent)]
Anyhow(#[from] anyhow::Error),
}

View File

@ -1,469 +0,0 @@
use std::fmt::Write;
use anyhow::{Context, Result};
use itertools::Itertools;
use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name};
use install_wheel_rs::linker::LinkMode;
use platform_host::Platform;
use platform_tags::Tags;
use puffin_cache::Cache;
use puffin_client::{FlatIndex, FlatIndexClient, RegistryClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_installer::{
Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages,
};
use puffin_interpreter::Virtualenv;
use puffin_resolver::InMemoryIndex;
use puffin_traits::{InFlight, SetupPyStrategy};
use pypi_types::Yanked;
use requirements_txt::EditableRequirement;
use crate::commands::reporters::{DownloadReporter, FinderReporter, InstallReporter};
use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{RequirementsSource, RequirementsSpecification};
/// Install a set of locked requirements into the current Python environment.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn pip_sync(
sources: &[RequirementsSource],
reinstall: &Reinstall,
link_mode: LinkMode,
index_locations: IndexLocations,
setup_py: SetupPyStrategy,
no_build: bool,
no_binary: &NoBinary,
strict: bool,
cache: Cache,
mut printer: Printer,
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Read all requirements from the provided sources.
let (requirements, editables) = RequirementsSpecification::requirements_and_editables(sources)?;
let num_requirements = requirements.len() + editables.len();
if num_requirements == 0 {
writeln!(printer, "No requirements found")?;
return Ok(ExitStatus::Success);
}
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
debug!(
"Using Python interpreter: {}",
venv.python_executable().display()
);
let _lock = venv.lock()?;
// Determine the current environment markers.
let tags = venv.interpreter().tags()?;
// Prep the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.build();
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_indexes()).await?;
FlatIndex::from_entries(entries, tags)
};
// Create a shared in-memory index.
let index = InMemoryIndex::default();
// Track in-flight downloads, builds, etc., across resolutions.
let in_flight = InFlight::default();
// Prep the build context.
let build_dispatch = BuildDispatch::new(
&client,
&cache,
venv.interpreter(),
&index_locations,
&flat_index,
&index,
&in_flight,
venv.python_executable(),
setup_py,
no_build,
no_binary,
);
// Determine the set of installed packages.
let site_packages =
SitePackages::from_executable(&venv).context("Failed to list installed packages")?;
// Resolve any editables.
let resolved_editables = resolve_editables(
editables,
&site_packages,
reinstall,
&venv,
tags,
&cache,
&client,
&build_dispatch,
printer,
)
.await?;
// Partition into those that should be linked from the cache (`local`), those that need to be
// downloaded (`remote`), and those that should be removed (`extraneous`).
let Plan {
local,
remote,
reinstalls,
extraneous,
} = Planner::with_requirements(&requirements)
.with_editable_requirements(resolved_editables.editables)
.build(
site_packages,
reinstall,
no_binary,
&index_locations,
&cache,
&venv,
tags,
)
.context("Failed to determine installation plan")?;
// Nothing to do.
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
let s = if num_requirements == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Audited {} in {}",
format!("{num_requirements} package{s}").bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(ExitStatus::Success);
}
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.build();
// Resolve any registry-based requirements.
let remote = if remote.is_empty() {
Vec::new()
} else {
let start = std::time::Instant::now();
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_indexes()).await?;
FlatIndex::from_entries(entries, tags)
};
let wheel_finder = puffin_resolver::DistFinder::new(
tags,
&client,
venv.interpreter(),
&flat_index,
no_binary,
)
.with_reporter(FinderReporter::from(printer).with_length(remote.len() as u64));
let resolution = wheel_finder.resolve(&remote).await?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
resolution.into_distributions().collect::<Vec<_>>()
};
// TODO(konstin): Also check the cache for whether any cached or installed dist is already known
// to have been yanked; we currently don't show this message on subsequent runs.
for dist in &remote {
let Some(file) = dist.file() else {
continue;
};
match &file.yanked {
None | Some(Yanked::Bool(false)) => {}
Some(Yanked::Bool(true)) => {
writeln!(
printer,
"{}{} {dist} is yanked. Refresh your lockfile to pin an un-yanked version.",
"warning".yellow().bold(),
":".bold(),
)?;
}
Some(Yanked::Reason(reason)) => {
writeln!(
printer,
"{}{} {dist} is yanked (reason: \"{reason}\"). Refresh your lockfile to pin an un-yanked version.",
"warning".yellow().bold(),
":".bold(),
)?;
}
}
}
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
Vec::new()
} else {
let start = std::time::Instant::now();
let downloader = Downloader::new(&cache, tags, &client, &build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader
.download(remote, &in_flight)
.await
.context("Failed to download distributions")?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Downloaded {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
wheels
};
// Remove any unnecessary packages.
if !extraneous.is_empty() || !reinstalls.is_empty() {
let start = std::time::Instant::now();
for dist_info in extraneous.iter().chain(reinstalls.iter()) {
let summary = puffin_installer::uninstall(dist_info).await?;
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
let s = if extraneous.len() + reinstalls.len() == 1 {
""
} else {
"s"
};
writeln!(
printer,
"{}",
format!(
"Uninstalled {} in {}",
format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
puffin_installer::Installer::new(&venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
// Report on any changes in the environment.
for event in extraneous
.into_iter()
.chain(reinstalls.into_iter())
.map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer,
" {} {}{}",
"+".green(),
event.dist.name().as_ref().white().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer,
" {} {}{}",
"-".red(),
event.dist.name().as_ref().white().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
}
}
// Validate that the environment is consistent.
if strict {
let site_packages = SitePackages::from_executable(&venv)?;
for diagnostic in site_packages.diagnostics()? {
writeln!(
printer,
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
Ok(ExitStatus::Success)
}
#[derive(Debug)]
struct ResolvedEditables {
/// The set of resolved editables, including both those that were already installed and those
/// that were built.
editables: Vec<ResolvedEditable>,
/// The temporary directory in which the built editables were stored.
#[allow(dead_code)]
temp_dir: Option<tempfile::TempDir>,
}
/// Resolve the set of editables that need to be installed.
#[allow(clippy::too_many_arguments)]
async fn resolve_editables(
editables: Vec<EditableRequirement>,
site_packages: &SitePackages<'_>,
reinstall: &Reinstall,
venv: &Virtualenv,
tags: &Tags,
cache: &Cache,
client: &RegistryClient,
build_dispatch: &BuildDispatch<'_>,
mut printer: Printer,
) -> Result<ResolvedEditables> {
// Partition the editables into those that are already installed, and those that must be built.
let mut installed = Vec::with_capacity(editables.len());
let mut uninstalled = Vec::with_capacity(editables.len());
for editable in editables {
match reinstall {
Reinstall::None => {
if let Some(dist) = site_packages.get_editable(editable.raw()) {
installed.push(dist.clone());
} else {
uninstalled.push(editable);
}
}
Reinstall::All => {
uninstalled.push(editable);
}
Reinstall::Packages(packages) => {
if let Some(dist) = site_packages.get_editable(editable.raw()) {
if packages.contains(dist.name()) {
uninstalled.push(editable);
} else {
installed.push(dist.clone());
}
} else {
uninstalled.push(editable);
}
}
}
}
// Build any editable installs.
let (built_editables, temp_dir) = if uninstalled.is_empty() {
(Vec::new(), None)
} else {
let start = std::time::Instant::now();
let temp_dir = tempfile::tempdir_in(venv.root())?;
let downloader = Downloader::new(cache, tags, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64));
let local_editables: Vec<LocalEditable> = uninstalled
.iter()
.map(|editable| {
let EditableRequirement { path, url } = editable;
Ok(LocalEditable {
path: path.clone(),
url: url.clone(),
})
})
.collect::<Result<_>>()?;
let built_editables: Vec<_> = downloader
.build_editables(local_editables, temp_dir.path())
.await
.context("Failed to build editables")?
.into_iter()
.collect();
let s = if built_editables.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Built {} in {}",
format!("{} editable{}", built_editables.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
(built_editables, Some(temp_dir))
};
Ok(ResolvedEditables {
editables: installed
.into_iter()
.map(ResolvedEditable::Installed)
.chain(built_editables.into_iter().map(ResolvedEditable::Built))
.collect::<Vec<_>>(),
temp_dir,
})
}
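// Sketch (string stand-ins for editables, with `Reinstall` mirrored as a local
// enum; not part of the original file): the partition rule above reuses an
// installed editable unless the reinstall mode asks for it to be rebuilt.
#[cfg(test)]
mod editable_partition_sketch {
    enum Reinstall<'a> {
        None,
        All,
        Packages(&'a [&'a str]),
    }

    fn must_rebuild(editable: &str, is_installed: bool, reinstall: &Reinstall<'_>) -> bool {
        match reinstall {
            Reinstall::None => !is_installed,
            Reinstall::All => true,
            Reinstall::Packages(packages) => !is_installed || packages.contains(&editable),
        }
    }

    #[test]
    fn reuses_installed_unless_requested() {
        assert!(!must_rebuild("a", true, &Reinstall::None));
        assert!(must_rebuild("a", true, &Reinstall::All));
        assert!(must_rebuild("a", true, &Reinstall::Packages(&["a"])));
        assert!(must_rebuild("b", false, &Reinstall::Packages(&["a"])));
    }
}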

View File

@ -1,151 +0,0 @@
use std::fmt::Write;
use anyhow::Result;
use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::{InstalledMetadata, Name};
use platform_host::Platform;
use puffin_cache::Cache;
use puffin_interpreter::Virtualenv;
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{RequirementsSource, RequirementsSpecification};
/// Uninstall packages from the current environment.
pub(crate) async fn pip_uninstall(
sources: &[RequirementsSource],
cache: Cache,
mut printer: Printer,
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Read all requirements from the provided sources.
let (requirements, editables) = RequirementsSpecification::requirements_and_editables(sources)?;
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
debug!(
"Using Python interpreter: {}",
venv.python_executable().display()
);
let _lock = venv.lock()?;
// Index the current `site-packages` directory.
let site_packages = puffin_installer::SitePackages::from_executable(&venv)?;
// Sort and deduplicate the packages, which are keyed by name.
let packages = {
let mut packages = requirements
.into_iter()
.map(|requirement| requirement.name)
.collect::<Vec<_>>();
packages.sort_unstable();
packages.dedup();
packages
};
// Sort and deduplicate the editable packages, which are keyed by URL rather than package name.
let editables = {
let mut editables = editables
.iter()
.map(requirements_txt::EditableRequirement::raw)
.collect::<Vec<_>>();
editables.sort_unstable();
editables.dedup();
editables
};
// Map to the local distributions.
let distributions = {
let mut distributions = Vec::with_capacity(packages.len() + editables.len());
// Identify all packages that are installed.
for package in &packages {
if let Some(distribution) = site_packages.get(package) {
distributions.push(distribution);
} else {
writeln!(
printer,
"{}{} Skipping {} as it is not installed.",
"warning".yellow().bold(),
":".bold(),
package.as_ref().bold()
)?;
};
}
// Identify all editables that are installed.
for editable in &editables {
if let Some(distribution) = site_packages.get_editable(editable) {
distributions.push(distribution);
} else {
writeln!(
printer,
"{}{} Skipping {} as it is not installed.",
"warning".yellow().bold(),
":".bold(),
editable.as_ref().bold()
)?;
};
}
// Deduplicate, since a package could be listed both by name and editable URL.
distributions.sort_unstable_by_key(|dist| dist.path());
distributions.dedup_by_key(|dist| dist.path());
distributions
};
if distributions.is_empty() {
writeln!(
printer,
"{}{} No packages to uninstall.",
"warning".yellow().bold(),
":".bold(),
)?;
return Ok(ExitStatus::Success);
}
// Uninstall each package.
for distribution in &distributions {
let summary = puffin_installer::uninstall(distribution).await?;
debug!(
"Uninstalled {} ({} file{}, {} director{})",
distribution.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
writeln!(
printer,
"{}",
format!(
"Uninstalled {} in {}",
format!(
"{} package{}",
distributions.len(),
if distributions.len() == 1 { "" } else { "s" }
)
.bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
for distribution in distributions {
writeln!(
printer,
" {} {}{}",
"-".red(),
distribution.name().as_ref().white().bold(),
distribution.installed_version().to_string().dimmed()
)?;
}
Ok(ExitStatus::Success)
}
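// Sketch (plain paths stand in for the distributions above): `dedup` only
// removes *adjacent* duplicates, which is why the sort by path comes first.
#[cfg(test)]
mod dedup_sketch {
    #[test]
    fn sort_then_dedup_removes_repeats() {
        let mut paths = vec!["site/b", "site/a", "site/b"];
        paths.sort_unstable();
        paths.dedup();
        assert_eq!(paths, ["site/a", "site/b"]);
    }
}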

View File

@ -1,74 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use miette::{Diagnostic, IntoDiagnostic};
use thiserror::Error;
use tracing::info;
use puffin_normalize::PackageName;
use puffin_workspace::WorkspaceError;
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Remove a dependency from the workspace.
#[allow(clippy::unnecessary_wraps)]
pub(crate) fn remove(name: &PackageName, _printer: Printer) -> Result<ExitStatus> {
match remove_impl(name) {
Ok(status) => Ok(status),
Err(err) => {
#[allow(clippy::print_stderr)]
{
eprint!("{err:?}");
}
Ok(ExitStatus::Failure)
}
}
}
#[derive(Error, Debug, Diagnostic)]
enum RemoveError {
#[error(
"Could not find a `pyproject.toml` file in the current directory or any of its parents"
)]
#[diagnostic(code(puffin::remove::workspace_not_found))]
WorkspaceNotFound,
#[error("Failed to parse `pyproject.toml` at: `{0}`")]
#[diagnostic(code(puffin::remove::parse))]
ParseError(PathBuf, #[source] WorkspaceError),
#[error("Failed to write `pyproject.toml` to: `{0}`")]
#[diagnostic(code(puffin::remove::write))]
WriteError(PathBuf, #[source] WorkspaceError),
#[error("Failed to remove `{0}` from `pyproject.toml`")]
#[diagnostic(code(puffin::remove::parse))]
RemovalError(String, #[source] WorkspaceError),
}
fn remove_impl(name: &PackageName) -> miette::Result<ExitStatus> {
// Locate the workspace.
let cwd = std::env::current_dir().into_diagnostic()?;
let Some(workspace_root) = puffin_workspace::find_pyproject_toml(cwd) else {
return Err(RemoveError::WorkspaceNotFound.into());
};
info!("Found workspace at: {}", workspace_root.display());
// Parse the manifest.
let mut manifest = puffin_workspace::Workspace::try_from(workspace_root.as_path())
.map_err(|err| RemoveError::ParseError(workspace_root.clone(), err))?;
// Remove the dependency.
manifest
.remove_dependency(name)
.map_err(|err| RemoveError::RemovalError(name.to_string(), err))?;
// Write the manifest back to disk.
manifest
.save(&workspace_root)
.map_err(|err| RemoveError::WriteError(workspace_root.clone(), err))?;
Ok(ExitStatus::Success)
}
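// Sketch (assumes only thiserror and the standard library; `Write` here is a
// hypothetical stand-in for `RemoveError::WriteError`): the `#[source]`
// attribute above is what preserves the underlying `WorkspaceError` as a cause
// in the rendered error chain.
#[cfg(test)]
mod source_chain_sketch {
    use std::error::Error as _;

    use thiserror::Error;

    #[derive(Error, Debug)]
    #[error("Failed to write `pyproject.toml`")]
    struct Write(#[source] std::io::Error);

    #[test]
    fn source_is_preserved() {
        let err = Write(std::io::Error::new(std::io::ErrorKind::Other, "disk full"));
        assert_eq!(err.source().unwrap().to_string(), "disk full");
    }
}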

View File

@ -1,323 +0,0 @@
use std::sync::{Arc, Mutex};
use std::time::Duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use owo_colors::OwoColorize;
use url::Url;
use distribution_types::{
CachedDist, Dist, DistributionMetadata, LocalEditable, Name, SourceDist, VersionOrUrl,
};
use puffin_normalize::PackageName;
use crate::printer::Printer;
#[derive(Debug)]
pub(crate) struct FinderReporter {
progress: ProgressBar,
}
impl From<Printer> for FinderReporter {
fn from(printer: Printer) -> Self {
let progress = ProgressBar::with_draw_target(None, printer.target());
progress.set_style(
ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(),
);
progress.set_message("Resolving dependencies...");
Self { progress }
}
}
impl FinderReporter {
#[must_use]
pub(crate) fn with_length(self, length: u64) -> Self {
self.progress.set_length(length);
self
}
}
impl puffin_resolver::FinderReporter for FinderReporter {
fn on_progress(&self, dist: &Dist) {
self.progress.set_message(format!("{dist}"));
self.progress.inc(1);
}
fn on_complete(&self) {
self.progress.finish_and_clear();
}
}
#[derive(Debug)]
pub(crate) struct DownloadReporter {
printer: Printer,
multi_progress: MultiProgress,
progress: ProgressBar,
bars: Arc<Mutex<Vec<ProgressBar>>>,
}
impl From<Printer> for DownloadReporter {
fn from(printer: Printer) -> Self {
let multi_progress = MultiProgress::with_draw_target(printer.target());
let progress = multi_progress.add(ProgressBar::with_draw_target(None, printer.target()));
progress.set_style(
ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(),
);
progress.set_message("Fetching packages...");
Self {
printer,
multi_progress,
progress,
bars: Arc::new(Mutex::new(Vec::new())),
}
}
}
impl DownloadReporter {
#[must_use]
pub(crate) fn with_length(self, length: u64) -> Self {
self.progress.set_length(length);
self
}
}
impl DownloadReporter {
fn on_any_build_start(&self, color_string: &str) -> usize {
let progress = self.multi_progress.insert_before(
&self.progress,
ProgressBar::with_draw_target(None, self.printer.target()),
);
progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap());
progress.set_message(format!("{} {}", "Building".bold().cyan(), color_string));
let mut bars = self.bars.lock().unwrap();
bars.push(progress);
bars.len() - 1
}
fn on_any_build_complete(&self, color_string: &str, id: usize) {
let bars = self.bars.lock().unwrap();
let progress = &bars[id];
progress.finish_with_message(format!(" {} {}", "Built".bold().green(), color_string));
}
}
impl puffin_installer::DownloadReporter for DownloadReporter {
fn on_progress(&self, dist: &CachedDist) {
self.progress.set_message(format!("{dist}"));
self.progress.inc(1);
}
fn on_complete(&self) {
self.progress.finish_and_clear();
}
fn on_build_start(&self, dist: &SourceDist) -> usize {
self.on_any_build_start(&dist.to_color_string())
}
fn on_build_complete(&self, dist: &SourceDist, index: usize) {
self.on_any_build_complete(&dist.to_color_string(), index);
}
fn on_editable_build_start(&self, dist: &LocalEditable) -> usize {
self.on_any_build_start(&dist.to_color_string())
}
fn on_editable_build_complete(&self, dist: &LocalEditable, id: usize) {
self.on_any_build_complete(&dist.to_color_string(), id);
}
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize {
let progress = self.multi_progress.insert_before(
&self.progress,
ProgressBar::with_draw_target(None, self.printer.target()),
);
progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap());
progress.set_message(format!(
"{} {} ({})",
"Updating".bold().cyan(),
url,
rev.dimmed()
));
progress.finish();
let mut bars = self.bars.lock().unwrap();
bars.push(progress);
bars.len() - 1
}
fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize) {
let bars = self.bars.lock().unwrap();
let progress = &bars[index];
progress.finish_with_message(format!(
" {} {} ({})",
"Updated".bold().green(),
url,
rev.dimmed()
));
}
}
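// Sketch (std-only, simplified from the reporter above): handing out an index
// into a shared `Vec` as a cheap handle that the caller passes back on completion.
#[cfg(test)]
mod bar_handle_sketch {
    use std::sync::Mutex;

    #[derive(Default)]
    struct Bars {
        names: Mutex<Vec<String>>,
    }

    impl Bars {
        fn start(&self, name: &str) -> usize {
            let mut names = self.names.lock().unwrap();
            names.push(name.to_string());
            names.len() - 1
        }

        fn complete(&self, id: usize) -> String {
            self.names.lock().unwrap()[id].clone()
        }
    }

    #[test]
    fn ids_round_trip() {
        let bars = Bars::default();
        let first = bars.start("flask-3.0.0.tar.gz");
        let second = bars.start("pydantic-core-2.14.6.tar.gz");
        assert_eq!(bars.complete(first), "flask-3.0.0.tar.gz");
        assert_eq!(bars.complete(second), "pydantic-core-2.14.6.tar.gz");
    }
}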
#[derive(Debug)]
pub(crate) struct InstallReporter {
progress: ProgressBar,
}
impl From<Printer> for InstallReporter {
fn from(printer: Printer) -> Self {
let progress = ProgressBar::with_draw_target(None, printer.target());
progress.set_style(
ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(),
);
progress.set_message("Installing wheels...");
Self { progress }
}
}
impl InstallReporter {
#[must_use]
pub(crate) fn with_length(self, length: u64) -> Self {
self.progress.set_length(length);
self
}
}
impl puffin_installer::InstallReporter for InstallReporter {
fn on_install_progress(&self, wheel: &CachedDist) {
self.progress.set_message(format!("{wheel}"));
self.progress.inc(1);
}
fn on_install_complete(&self) {
self.progress.finish_and_clear();
}
}
#[derive(Debug)]
pub(crate) struct ResolverReporter {
printer: Printer,
multi_progress: MultiProgress,
progress: ProgressBar,
bars: Arc<Mutex<Vec<ProgressBar>>>,
}
impl From<Printer> for ResolverReporter {
fn from(printer: Printer) -> Self {
let multi_progress = MultiProgress::with_draw_target(printer.target());
let progress = multi_progress.add(ProgressBar::with_draw_target(None, printer.target()));
progress.enable_steady_tick(Duration::from_millis(200));
progress.set_style(
ProgressStyle::with_template("{spinner:.white} {wide_msg:.dim}")
.unwrap()
.tick_strings(&["", "", "", "", "", "", "", "", "", ""]),
);
progress.set_message("Resolving dependencies...");
Self {
printer,
multi_progress,
progress,
bars: Arc::new(Mutex::new(Vec::new())),
}
}
}
impl puffin_resolver::ResolverReporter for ResolverReporter {
fn on_progress(&self, name: &PackageName, version_or_url: VersionOrUrl) {
match version_or_url {
VersionOrUrl::Version(version) => {
self.progress.set_message(format!("{name}=={version}"));
}
VersionOrUrl::Url(url) => {
self.progress.set_message(format!("{name} @ {url}"));
}
}
}
fn on_complete(&self) {
self.progress.finish_and_clear();
}
fn on_build_start(&self, dist: &SourceDist) -> usize {
let progress = self.multi_progress.insert_before(
&self.progress,
ProgressBar::with_draw_target(None, self.printer.target()),
);
progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap());
progress.set_message(format!(
"{} {}",
"Building".bold().cyan(),
dist.to_color_string(),
));
let mut bars = self.bars.lock().unwrap();
bars.push(progress);
bars.len() - 1
}
fn on_build_complete(&self, dist: &SourceDist, index: usize) {
let bars = self.bars.lock().unwrap();
let progress = &bars[index];
progress.finish_with_message(format!(
" {} {}",
"Built".bold().green(),
dist.to_color_string(),
));
}
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize {
let progress = self.multi_progress.insert_before(
&self.progress,
ProgressBar::with_draw_target(None, self.printer.target()),
);
progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap());
progress.set_message(format!(
"{} {} ({})",
"Updating".bold().cyan(),
url,
rev.dimmed()
));
progress.finish();
let mut bars = self.bars.lock().unwrap();
bars.push(progress);
bars.len() - 1
}
fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize) {
let bars = self.bars.lock().unwrap();
let progress = &bars[index];
progress.finish_with_message(format!(
" {} {} ({})",
"Updated".bold().green(),
url,
rev.dimmed()
));
}
}
/// Like [`std::fmt::Display`], but with colors.
trait ColorDisplay {
fn to_color_string(&self) -> String;
}
impl ColorDisplay for SourceDist {
fn to_color_string(&self) -> String {
let name = self.name();
let version_or_url = self.version_or_url();
format!("{}{}", name, version_or_url.to_string().dimmed())
}
}
impl ColorDisplay for LocalEditable {
fn to_color_string(&self) -> String {
format!("{}", self.to_string().dimmed())
}
}

View File

@ -1,194 +0,0 @@
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::Result;
use fs_err as fs;
use miette::{Diagnostic, IntoDiagnostic};
use owo_colors::OwoColorize;
use puffin_installer::NoBinary;
use thiserror::Error;
use distribution_types::{DistributionMetadata, IndexLocations, Name};
use pep508_rs::Requirement;
use platform_host::Platform;
use puffin_cache::Cache;
use puffin_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_interpreter::Interpreter;
use puffin_resolver::InMemoryIndex;
use puffin_traits::{BuildContext, InFlight, SetupPyStrategy};
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Create a virtual environment.
#[allow(clippy::unnecessary_wraps)]
pub(crate) async fn venv(
path: &Path,
base_python: Option<&Path>,
index_locations: &IndexLocations,
seed: bool,
cache: &Cache,
printer: Printer,
) -> Result<ExitStatus> {
match venv_impl(path, base_python, index_locations, seed, cache, printer).await {
Ok(status) => Ok(status),
Err(err) => {
#[allow(clippy::print_stderr)]
{
eprint!("{err:?}");
}
Ok(ExitStatus::Failure)
}
}
}
#[derive(Error, Debug, Diagnostic)]
enum VenvError {
#[error("Unable to find a Python interpreter")]
#[diagnostic(code(puffin::venv::python_not_found))]
PythonNotFound,
#[error("Unable to find a Python interpreter {0}")]
#[diagnostic(code(puffin::venv::python_not_found))]
UserPythonNotFound(PathBuf),
#[error("Failed to extract Python interpreter info")]
#[diagnostic(code(puffin::venv::interpreter))]
InterpreterError(#[source] puffin_interpreter::Error),
#[error("Failed to create virtual environment")]
#[diagnostic(code(puffin::venv::creation))]
CreationError(#[source] gourgeist::Error),
#[error("Failed to install seed packages")]
#[diagnostic(code(puffin::venv::seed))]
SeedError(#[source] anyhow::Error),
#[error("Failed to extract interpreter tags")]
#[diagnostic(code(puffin::venv::tags))]
TagsError(#[source] platform_host::PlatformError),
#[error("Failed to resolve `--find-links` entry")]
#[diagnostic(code(puffin::venv::flat_index))]
FlatIndexError(#[source] puffin_client::FlatIndexError),
}
/// Create a virtual environment.
async fn venv_impl(
path: &Path,
base_python: Option<&Path>,
index_locations: &IndexLocations,
seed: bool,
cache: &Cache,
mut printer: Printer,
) -> miette::Result<ExitStatus> {
// Locate the Python interpreter.
let base_python = if let Some(base_python) = base_python {
fs::canonicalize(
which::which_global(base_python)
.map_err(|_| VenvError::UserPythonNotFound(base_python.to_path_buf()))?,
)
.into_diagnostic()?
} else {
fs::canonicalize(
which::which_global("python3")
.or_else(|_| which::which_global("python"))
.map_err(|_| VenvError::PythonNotFound)?,
)
.into_diagnostic()?
};
let platform = Platform::current().into_diagnostic()?;
let interpreter =
Interpreter::query(&base_python, platform, cache).map_err(VenvError::InterpreterError)?;
writeln!(
printer,
"Using Python {} at {}",
interpreter.version(),
interpreter.sys_executable().display().cyan()
)
.into_diagnostic()?;
writeln!(
printer,
"Creating virtual environment at: {}",
path.display().cyan()
)
.into_diagnostic()?;
// Create the virtual environment.
let venv = gourgeist::create_venv(path, interpreter).map_err(VenvError::CreationError)?;
// Install seed packages.
if seed {
// Extract the interpreter.
let interpreter = venv.interpreter();
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone()).build();
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let tags = interpreter.tags().map_err(VenvError::TagsError)?;
let client = FlatIndexClient::new(&client, cache);
let entries = client
.fetch(index_locations.flat_indexes())
.await
.map_err(VenvError::FlatIndexError)?;
FlatIndex::from_entries(entries, tags)
};
// Create a shared in-memory index.
let index = InMemoryIndex::default();
// Track in-flight downloads, builds, etc., across resolutions.
let in_flight = InFlight::default();
// Prep the build context.
let build_dispatch = BuildDispatch::new(
&client,
cache,
interpreter,
index_locations,
&flat_index,
&index,
&in_flight,
venv.python_executable(),
SetupPyStrategy::default(),
true,
&NoBinary::None,
);
// Resolve the seed packages.
let resolution = build_dispatch
.resolve(&[
Requirement::from_str("wheel").unwrap(),
Requirement::from_str("pip").unwrap(),
Requirement::from_str("setuptools").unwrap(),
])
.await
.map_err(VenvError::SeedError)?;
// Install into the environment.
build_dispatch
.install(&resolution, &venv)
.await
.map_err(VenvError::SeedError)?;
for distribution in resolution.distributions() {
writeln!(
printer,
" {} {}{}",
"+".green(),
distribution.name().as_ref().white().bold(),
distribution.version_or_url().dimmed()
)
.into_diagnostic()?;
}
}
Ok(ExitStatus::Success)
}
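// Usage sketch (mirrors the seed block above; assumes the same `pep508_rs`
// dependency): the seed packages are ordinary PEP 508 requirements parsed
// from strings.
#[cfg(test)]
mod seed_sketch {
    use std::str::FromStr;

    use pep508_rs::Requirement;

    #[test]
    fn seed_requirements_parse() {
        for name in ["wheel", "pip", "setuptools"] {
            assert!(Requirement::from_str(name).is_ok());
        }
    }
}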

View File

@ -1,100 +0,0 @@
use tracing::level_filters::LevelFilter;
#[cfg(feature = "tracing-durations-export")]
use tracing_durations_export::{
plot::PlotConfig, DurationsLayer, DurationsLayerBuilder, DurationsLayerDropGuard,
};
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{EnvFilter, Layer, Registry};
use tracing_tree::time::Uptime;
use tracing_tree::HierarchicalLayer;
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub(crate) enum Level {
/// Suppress all tracing output by default (overrideable by `RUST_LOG`).
#[default]
Default,
/// Show debug messages by default (overrideable by `RUST_LOG`).
Verbose,
}
/// Configure `tracing` based on the given [`Level`], taking into account the `RUST_LOG` environment
/// variable.
///
/// The [`Level`] is used to dictate the default filters (which can be overridden by the `RUST_LOG`
/// environment variable) along with the formatting of the output. For example, [`Level::Verbose`]
/// includes targets and timestamps, along with all `puffin=debug` messages by default.
pub(crate) fn setup_logging(level: Level, duration: impl Layer<Registry> + Send + Sync) {
match level {
Level::Default => {
// Show nothing, but allow `RUST_LOG` to override.
let filter = EnvFilter::builder()
.with_default_directive(LevelFilter::OFF.into())
.from_env_lossy();
// Regardless of the tracing level, show messages without any adornment.
tracing_subscriber::registry()
.with(duration)
.with(filter)
.with(
tracing_subscriber::fmt::layer()
.without_time()
.with_target(false)
.with_writer(std::io::sink),
)
.init();
}
Level::Verbose => {
// Show `DEBUG` messages from the CLI crate, but allow `RUST_LOG` to override.
let filter = EnvFilter::try_from_default_env()
.or_else(|_| EnvFilter::try_new("puffin=debug"))
.unwrap();
// Regardless of the tracing level, include the uptime and target for each message.
tracing_subscriber::registry()
.with(duration)
.with(filter)
.with(
HierarchicalLayer::default()
.with_targets(true)
.with_timer(Uptime::default())
.with_writer(std::io::stderr),
)
.init();
}
}
}
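// Sketch (uses only the tracing-subscriber API already imported above): the
// same "off by default, `RUST_LOG` may override" filter that `Level::Default`
// builds.
#[cfg(test)]
mod default_filter_sketch {
    use tracing::level_filters::LevelFilter;
    use tracing_subscriber::EnvFilter;

    #[test]
    fn builds_without_panicking() {
        // `from_env_lossy` ignores malformed `RUST_LOG` entries instead of failing.
        let _filter = EnvFilter::builder()
            .with_default_directive(LevelFilter::OFF.into())
            .from_env_lossy();
    }
}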
/// Setup the `TRACING_DURATIONS_FILE` environment variable to enable tracing durations.
#[cfg(feature = "tracing-durations-export")]
pub(crate) fn setup_duration() -> (
Option<DurationsLayer<Registry>>,
Option<DurationsLayerDropGuard>,
) {
if let Ok(location) = std::env::var("TRACING_DURATIONS_FILE") {
let location = std::path::PathBuf::from(location);
if let Some(parent) = location.parent() {
fs_err::create_dir_all(parent)
.expect("Failed to create parent of TRACING_DURATIONS_FILE");
}
let plot_config = PlotConfig {
multi_lane: true,
min_length: Some(std::time::Duration::from_secs_f32(0.002)),
remove: Some(
["get_cached_with_callback".to_string()]
.into_iter()
.collect(),
),
..PlotConfig::default()
};
let (layer, guard) = DurationsLayerBuilder::default()
.durations_file(&location)
.plot_file(location.with_extension("svg"))
.plot_config(plot_config)
.build()
.expect("Couldn't create TRACING_DURATIONS_FILE files");
(Some(layer), Some(guard))
} else {
(None, None)
}
}

View File

@ -1,799 +1,2 @@
use std::env;
use std::path::PathBuf;
use std::process::ExitCode;
use std::str::FromStr;
use anstream::eprintln;
use anyhow::Result;
use chrono::{DateTime, Days, NaiveDate, NaiveTime, Utc};
use clap::{Args, Parser, Subcommand};
use owo_colors::OwoColorize;
use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl};
use puffin_cache::{Cache, CacheArgs};
use puffin_installer::{NoBinary, Reinstall};
use puffin_interpreter::PythonVersion;
use puffin_normalize::{ExtraName, PackageName};
use puffin_resolver::{PreReleaseMode, ResolutionMode};
use puffin_traits::SetupPyStrategy;
use requirements::ExtrasSpecification;
use crate::commands::{extra_name_with_clap_error, ExitStatus, Upgrade};
use crate::requirements::RequirementsSource;
#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[cfg(all(
not(target_os = "windows"),
not(target_os = "openbsd"),
any(
target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "powerpc64"
)
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
mod commands;
mod logging;
mod printer;
mod requirements;
#[derive(Parser)]
#[command(author, version, about)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
command: Commands,
/// Do not print any output.
#[arg(global = true, long, short, conflicts_with = "verbose")]
quiet: bool,
/// Use verbose output.
#[arg(global = true, long, short, conflicts_with = "quiet")]
verbose: bool,
#[command(flatten)]
cache_args: CacheArgs,
}
#[derive(Subcommand)]
#[allow(clippy::large_enum_variant)]
enum Commands {
/// Resolve and install Python packages.
Pip(PipArgs),
/// Create a virtual environment.
#[clap(alias = "virtualenv", alias = "v")]
Venv(VenvArgs),
/// Clear the cache.
Clean(CleanArgs),
/// Add a dependency to the workspace.
#[clap(hide = true)]
Add(AddArgs),
/// Remove a dependency from the workspace.
#[clap(hide = true)]
Remove(RemoveArgs),
}
#[derive(Args)]
struct PipArgs {
#[clap(subcommand)]
command: PipCommand,
}
#[derive(Subcommand)]
enum PipCommand {
/// Compile a `requirements.in` file to a `requirements.txt` file.
Compile(PipCompileArgs),
/// Sync dependencies from a `requirements.txt` file.
Sync(PipSyncArgs),
/// Install packages into the current environment.
Install(PipInstallArgs),
/// Uninstall packages from the current environment.
Uninstall(PipUninstallArgs),
/// Enumerate the installed packages in the current environment.
Freeze(PipFreezeArgs),
}
/// Clap parser for the union of date and datetime
fn date_or_datetime(input: &str) -> Result<DateTime<Utc>, String> {
let date_err = match NaiveDate::from_str(input) {
Ok(date) => {
// A bare date is inclusive of that day, so its cutoff is midnight (00:00:00) of the next day.
return Ok((date + Days::new(1)).and_time(NaiveTime::MIN).and_utc());
}
Err(err) => err,
};
let datetime_err = match DateTime::parse_from_rfc3339(input) {
Ok(datetime) => return Ok(datetime.with_timezone(&Utc)),
Err(err) => err,
};
Err(format!(
"Neither a valid date ({date_err}) not a valid datetime ({datetime_err})"
))
}
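// Sketch test (not part of the original file; relies on the behavior documented
// above): a bare date is inclusive, so `2006-12-02` maps to midnight UTC on
// 2006-12-03, while an exact timestamp is kept as-is.
#[cfg(test)]
mod date_or_datetime_sketch {
    use super::date_or_datetime;

    #[test]
    fn date_maps_to_next_midnight() {
        let cutoff = date_or_datetime("2006-12-02").unwrap();
        assert_eq!(cutoff.to_rfc3339(), "2006-12-03T00:00:00+00:00");

        let exact = date_or_datetime("2006-12-02T02:07:43Z").unwrap();
        assert_eq!(exact.to_rfc3339(), "2006-12-02T02:07:43+00:00");
    }
}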
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipCompileArgs {
/// Include all packages listed in the given `requirements.in` files.
#[clap(required(true))]
src_file: Vec<PathBuf>,
/// Constrain versions using the given requirements files.
///
/// Constraints files are `requirements.txt`-like files that only control the _version_ of a
/// requirement that's installed. However, including a package in a constraints file will _not_
/// trigger the installation of that package.
///
/// This is equivalent to pip's `--constraint` option.
#[clap(short, long)]
constraint: Vec<PathBuf>,
/// Override versions using the given requirements files.
///
/// Overrides files are `requirements.txt`-like files that force a specific version of a
/// requirement to be installed, regardless of the requirements declared by any constituent
/// package, and regardless of whether this would be considered an invalid resolution.
///
/// While constraints are _additive_, in that they're combined with the requirements of the
/// constituent packages, overrides are _absolute_, in that they completely replace the
/// requirements of the constituent packages.
#[clap(long)]
r#override: Vec<PathBuf>,
/// Include optional dependencies in the given extra group name; may be provided more than once.
#[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
extra: Vec<ExtraName>,
/// Include all optional dependencies.
#[clap(long, conflicts_with = "extra")]
all_extras: bool,
#[clap(long, value_enum, default_value_t = ResolutionMode::default())]
resolution: ResolutionMode,
#[clap(long, value_enum, default_value_t = PreReleaseMode::default())]
prerelease: PreReleaseMode,
/// Write the compiled requirements to the given `requirements.txt` file.
#[clap(short, long)]
output_file: Option<PathBuf>,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Locations to search for candidate distributions, beyond those found in the indexes.
///
/// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
/// source distributions (`.tar.gz` or `.zip`) at the top level.
///
/// If a URL, the page must contain a flat list of links to package files.
#[clap(long)]
find_links: Vec<FlatIndexLocation>,
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Allow package upgrades, ignoring pinned versions in the existing output file.
#[clap(long)]
upgrade: bool,
/// Allow upgrades for a specific package, ignoring pinned versions in the existing output
/// file.
#[clap(long)]
upgrade_package: Vec<PackageName>,
/// Include distribution hashes in the output file.
#[clap(long)]
generate_hashes: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
/// The minimum Python version that should be supported by the compiled requirements (e.g.,
/// `3.7` or `3.7.9`).
///
/// If a patch version is omitted, the most recent known patch version for that minor version
/// is assumed. For example, `3.7` is mapped to `3.7.17`.
#[arg(long, short)]
python_version: Option<PythonVersion>,
/// Try to resolve at a past time.
///
/// This works by filtering out files with a more recent upload time, so if the index you use
/// does not provide upload times, the results might be inaccurate. PyPI provides upload times
/// for all files.
///
/// Timestamps are given either as RFC 3339 timestamps such as `2006-12-02T02:07:43Z` or as
/// UTC dates in the same format such as `2006-12-02`. Dates are interpreted as including the
/// entire day, i.e., up to midnight UTC at the end of that day.
#[arg(long, value_parser = date_or_datetime)]
exclude_newer: Option<DateTime<Utc>>,
}
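// Conceptual sketch (hypothetical helper, not part of the CLI): the distinction
// drawn in the doc comments above — a constraint is combined with the declared
// requirement, while an override replaces it outright.
#[cfg(test)]
mod constraint_override_sketch {
    fn effective_spec(declared: &str, constraint: Option<&str>, overridden: Option<&str>) -> String {
        match (overridden, constraint) {
            (Some(over), _) => over.to_string(),              // absolute: replaces
            (None, Some(con)) => format!("{declared},{con}"), // additive: both apply
            (None, None) => declared.to_string(),
        }
    }

    #[test]
    fn constraints_add_overrides_replace() {
        assert_eq!(effective_spec(">=2", Some("<3"), None), ">=2,<3");
        assert_eq!(effective_spec(">=2", Some("<3"), Some("==2.5")), "==2.5");
    }
}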
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipSyncArgs {
/// Include all packages listed in the given `requirements.txt` files.
#[clap(required(true))]
src_file: Vec<PathBuf>,
/// Reinstall all packages, overwriting any entries in the cache and replacing any existing
/// packages in the environment.
#[clap(long)]
reinstall: bool,
/// Reinstall a specific package, overwriting any entries in the cache and replacing any
/// existing versions in the environment.
#[clap(long)]
reinstall_package: Vec<PackageName>,
/// The method to use when installing packages from the global cache.
#[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())]
link_mode: install_wheel_rs::linker::LinkMode,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Locations to search for candidate distributions, beyond those found in the indexes.
///
/// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
/// source distributions (`.tar.gz` or `.zip`) at the top level.
///
/// If a URL, the page must contain a flat list of links to package files.
#[clap(long)]
find_links: Vec<FlatIndexLocation>,
/// Ignore the registry index (e.g., PyPI), instead relying on local caches and `--find-links`
/// directories and URLs.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
/// Don't install pre-built wheels.
///
/// When enabled, all packages will be installed from source distributions. The resolver
/// will still use pre-built wheels for metadata.
#[clap(long)]
no_binary: bool,
/// Don't install pre-built wheels for a specific package.
///
/// When enabled, the specified packages will be installed from a source distribution. The resolver
/// will still use pre-built wheels for metadata.
#[clap(long)]
no_binary_package: Vec<PackageName>,
/// Validate the virtual environment after completing the installation, to detect packages with
/// missing dependencies or other issues.
#[clap(long)]
strict: bool,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
struct PipInstallArgs {
/// Install all listed packages.
#[clap(group = "sources")]
package: Vec<String>,
/// Install all packages listed in the given requirements files.
#[clap(short, long, group = "sources")]
requirement: Vec<PathBuf>,
/// Install the editable package based on the provided local file path.
#[clap(short, long, group = "sources")]
editable: Vec<String>,
/// Constrain versions using the given requirements files.
///
/// Constraints files are `requirements.txt`-like files that only control the _version_ of a
/// requirement that's installed. However, including a package in a constraints file will _not_
/// trigger the installation of that package.
///
/// This is equivalent to pip's `--constraint` option.
#[clap(short, long)]
constraint: Vec<PathBuf>,
/// Override versions using the given requirements files.
///
/// Overrides files are `requirements.txt`-like files that force a specific version of a
/// requirement to be installed, regardless of the requirements declared by any constituent
/// package, and regardless of whether this would be considered an invalid resolution.
///
/// While constraints are _additive_, in that they're combined with the requirements of the
/// constituent packages, overrides are _absolute_, in that they completely replace the
/// requirements of the constituent packages.
#[clap(long)]
r#override: Vec<PathBuf>,
/// Include optional dependencies in the given extra group name; may be provided more than once.
#[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
extra: Vec<ExtraName>,
/// Include all optional dependencies.
#[clap(long, conflicts_with = "extra")]
all_extras: bool,
/// Reinstall all packages, overwriting any entries in the cache and replacing any existing
/// packages in the environment.
#[clap(long)]
reinstall: bool,
/// Reinstall a specific package, overwriting any entries in the cache and replacing any
/// existing versions in the environment.
#[clap(long)]
reinstall_package: Vec<PackageName>,
/// The method to use when installing packages from the global cache.
#[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())]
link_mode: install_wheel_rs::linker::LinkMode,
#[clap(long, value_enum, default_value_t = ResolutionMode::default())]
resolution: ResolutionMode,
#[clap(long, value_enum, default_value_t = PreReleaseMode::default())]
prerelease: PreReleaseMode,
/// Write the compiled requirements to the given `requirements.txt` file.
#[clap(short, long)]
output_file: Option<PathBuf>,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Locations to search for candidate distributions, beyond those found in the indexes.
///
/// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
/// source distributions (`.tar.gz` or `.zip`) at the top level.
///
/// If a URL, the page must contain a flat list of links to package files.
#[clap(long)]
find_links: Vec<FlatIndexLocation>,
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
/// Don't install pre-built wheels.
///
/// When enabled, all packages will be installed from source distributions. The resolver
/// will still use pre-built wheels for metadata.
#[clap(long)]
no_binary: bool,
/// Don't install pre-built wheels for a specific package.
///
/// When enabled, the specified packages will be installed from a source distribution. The resolver
/// will still use pre-built wheels for metadata.
#[clap(long)]
no_binary_package: Vec<PackageName>,
/// Validate the virtual environment after completing the installation, to detect packages with
/// missing dependencies or other issues.
#[clap(long)]
strict: bool,
/// Try to resolve at a past time.
///
/// This works by filtering out files with a more recent upload time, so if the index you use
/// does not provide upload times, the results might be inaccurate. PyPI provides upload times
/// for all files.
///
/// Timestamps are given either as RFC 3339 timestamps such as `2006-12-02T02:07:43Z` or as
/// UTC dates in the same format such as `2006-12-02`. Dates are interpreted as including the
/// entire day, i.e., up to midnight UTC at the end of that day.
#[arg(long, value_parser = date_or_datetime)]
exclude_newer: Option<DateTime<Utc>>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
struct PipUninstallArgs {
/// Uninstall all listed packages.
#[clap(group = "sources")]
package: Vec<String>,
/// Uninstall all packages listed in the given requirements files.
#[clap(short, long, group = "sources")]
requirement: Vec<PathBuf>,
/// Uninstall the editable package based on the provided local file path.
#[clap(short, long, group = "sources")]
editable: Vec<String>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipFreezeArgs {
/// Validate the virtual environment, to detect packages with missing dependencies or other
/// issues.
#[clap(long)]
strict: bool,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct CleanArgs {
/// The packages to remove from the cache.
package: Vec<PackageName>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct VenvArgs {
/// The Python interpreter to use for the virtual environment.
// Short `-p` to match `virtualenv`
// TODO(konstin): Support e.g. `-p 3.10`
#[clap(short, long)]
python: Option<PathBuf>,
/// Install seed packages (`pip`, `setuptools`, and `wheel`) into the virtual environment.
#[clap(long)]
seed: bool,
/// The path to the virtual environment to create.
#[clap(default_value = ".venv")]
name: PathBuf,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct AddArgs {
/// The name of the package to add (e.g., `Django==4.2.6`).
name: String,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct RemoveArgs {
/// The name of the package to remove (e.g., `Django`).
name: PackageName,
}
async fn inner() -> Result<ExitStatus> {
let cli = Cli::parse();
// Configure the `tracing` crate, which controls internal logging.
#[cfg(feature = "tracing-durations-export")]
let (duration_layer, _duration_guard) = logging::setup_duration();
#[cfg(not(feature = "tracing-durations-export"))]
let duration_layer = None::<tracing_subscriber::layer::Identity>;
logging::setup_logging(
if cli.verbose {
logging::Level::Verbose
} else {
logging::Level::Default
},
duration_layer,
);
// Configure the `Printer`, which controls user-facing output in the CLI.
let printer = if cli.quiet {
printer::Printer::Quiet
} else if cli.verbose {
printer::Printer::Verbose
} else {
printer::Printer::Default
};
// Configure the `warn!` macros, which control user-facing warnings in the CLI.
if !cli.quiet {
puffin_warnings::enable();
}
miette::set_hook(Box::new(|_| {
Box::new(
miette::MietteHandlerOpts::new()
.break_words(false)
.word_separator(textwrap::WordSeparator::AsciiSpace)
.word_splitter(textwrap::WordSplitter::NoHyphenation)
.wrap_lines(env::var("PUFFIN_NO_WRAP").map(|_| false).unwrap_or(true))
.build(),
)
}))?;
let cache = Cache::try_from(cli.cache_args)?;
match cli.command {
Commands::Pip(PipArgs {
command: PipCommand::Compile(args),
}) => {
let requirements = args
.src_file
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let constraints = args
.constraint
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let overrides = args
.r#override
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let index_urls = IndexLocations::from_args(
args.index_url,
args.extra_index_url,
args.find_links,
args.no_index,
);
let extras = if args.all_extras {
ExtrasSpecification::All
} else if args.extra.is_empty() {
ExtrasSpecification::None
} else {
ExtrasSpecification::Some(&args.extra)
};
let upgrade = Upgrade::from_args(args.upgrade, args.upgrade_package);
commands::pip_compile(
&requirements,
&constraints,
&overrides,
extras,
args.output_file.as_deref(),
args.resolution,
args.prerelease,
upgrade,
args.generate_hashes,
index_urls,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
args.python_version,
args.exclude_newer,
cache,
printer,
)
.await
}
Commands::Pip(PipArgs {
command: PipCommand::Sync(args),
}) => {
let index_urls = IndexLocations::from_args(
args.index_url,
args.extra_index_url,
args.find_links,
args.no_index,
);
let sources = args
.src_file
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package);
let no_binary = NoBinary::from_args(args.no_binary, args.no_binary_package);
commands::pip_sync(
&sources,
&reinstall,
args.link_mode,
index_urls,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
&no_binary,
args.strict,
cache,
printer,
)
.await
}
Commands::Pip(PipArgs {
command: PipCommand::Install(args),
}) => {
let requirements = args
.package
.into_iter()
.map(RequirementsSource::Package)
.chain(args.editable.into_iter().map(RequirementsSource::Editable))
.chain(args.requirement.into_iter().map(RequirementsSource::from))
.collect::<Vec<_>>();
let constraints = args
.constraint
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let overrides = args
.r#override
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let index_urls = IndexLocations::from_args(
args.index_url,
args.extra_index_url,
args.find_links,
args.no_index,
);
let extras = if args.all_extras {
ExtrasSpecification::All
} else if args.extra.is_empty() {
ExtrasSpecification::None
} else {
ExtrasSpecification::Some(&args.extra)
};
let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package);
let no_binary = NoBinary::from_args(args.no_binary, args.no_binary_package);
commands::pip_install(
&requirements,
&constraints,
&overrides,
&extras,
args.resolution,
args.prerelease,
index_urls,
&reinstall,
args.link_mode,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
&no_binary,
args.strict,
args.exclude_newer,
cache,
printer,
)
.await
}
Commands::Pip(PipArgs {
command: PipCommand::Uninstall(args),
}) => {
let sources = args
.package
.into_iter()
.map(RequirementsSource::Package)
.chain(args.editable.into_iter().map(RequirementsSource::Editable))
.chain(args.requirement.into_iter().map(RequirementsSource::from))
.collect::<Vec<_>>();
commands::pip_uninstall(&sources, cache, printer).await
}
Commands::Pip(PipArgs {
command: PipCommand::Freeze(args),
}) => commands::freeze(&cache, args.strict, printer),
Commands::Clean(args) => commands::clean(&cache, &args.package, printer),
Commands::Venv(args) => {
let index_locations = IndexLocations::from_args(
args.index_url,
args.extra_index_url,
// No find links for the venv subcommand, to keep things simple
Vec::new(),
args.no_index,
);
commands::venv(
&args.name,
args.python.as_deref(),
&index_locations,
args.seed,
&cache,
printer,
)
.await
}
Commands::Add(args) => commands::add(&args.name, printer),
Commands::Remove(args) => commands::remove(&args.name, printer),
}
}
fn main() -> ExitCode {
let result = if let Ok(stack_size) = env::var("PUFFIN_STACK_SIZE") {
// Artificially limit the stack size to test for stack overflows. Windows has a default
// stack size of 1 MB, which is lower than the Linux and macOS defaults.
// https://learn.microsoft.com/en-us/cpp/build/reference/stack-stack-allocations?view=msvc-170
let stack_size = stack_size.parse().expect("Invalid stack size");
let tokio_main = move || {
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.thread_stack_size(stack_size)
.build()
.expect("Failed building the Runtime")
.block_on(inner())
};
std::thread::Builder::new()
.stack_size(stack_size)
.spawn(tokio_main)
.expect("Tokio executor failed, was there a panic?")
.join()
.expect("Tokio executor failed, was there a panic?")
} else {
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Failed building the Runtime")
.block_on(inner())
};
match result {
Ok(code) => code.into(),
Err(err) => {
#[allow(clippy::print_stderr)]
{
let mut causes = err.chain();
eprintln!("{}: {}", "error".red().bold(), causes.next().unwrap());
for err in causes {
eprintln!(" {}: {}", "Caused by".red().bold(), err);
}
}
ExitStatus::Error.into()
}
}
}
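The spawn-with-explicit-stack pattern above, isolated as a runnable sketch (the 1 MiB figure mirrors the Windows default noted in the comment; the worker body is illustrative):

use std::thread;

fn main() {
    let handle = thread::Builder::new()
        // Constrain the stack to 1 MiB so overflows surface on every platform.
        .stack_size(1024 * 1024)
        .spawn(|| {
            // ... stack-heavy work, e.g., a deeply recursive resolver ...
        })
        .expect("failed to spawn worker thread");
    handle.join().expect("worker thread panicked");
}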

View File

@@ -1,41 +0,0 @@
use anstream::eprint;
use indicatif::ProgressDrawTarget;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum Printer {
/// A printer that prints to standard streams (in practice, stderr).
Default,
/// A printer that suppresses all output.
Quiet,
/// A printer that prints all output, including debug messages.
Verbose,
}
impl Printer {
/// Return the [`ProgressDrawTarget`] for this printer.
pub(crate) fn target(self) -> ProgressDrawTarget {
match self {
Self::Default => ProgressDrawTarget::stderr(),
Self::Quiet => ProgressDrawTarget::hidden(),
// Perhaps counterintuitively, hide the progress bar in verbose mode, too.
// Otherwise, it gets interleaved with debug messages.
Self::Verbose => ProgressDrawTarget::hidden(),
}
}
}
impl std::fmt::Write for Printer {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
match self {
Self::Default | Self::Verbose => {
#[allow(clippy::print_stderr, clippy::ignored_unit_patterns)]
{
eprint!("{s}");
}
}
Self::Quiet => {}
}
Ok(())
}
}
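Because `Printer` implements `std::fmt::Write`, command code can route user-facing output through it and let the variant decide whether anything is emitted. A minimal sketch, assuming the enum above (`report` is a hypothetical helper):

use std::fmt::Write;

fn report(mut printer: Printer, resolved: usize) -> std::fmt::Result {
    // Emitted to stderr for `Default` and `Verbose`; silently dropped for `Quiet`.
    writeln!(printer, "Resolved {resolved} packages")
}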

View File

@@ -1,210 +0,0 @@
//! A standard interface for working with heterogeneous sources of requirements.
use std::path::PathBuf;
use std::str::FromStr;
use anyhow::{Context, Result};
use fs_err as fs;
use rustc_hash::FxHashSet;
use pep508_rs::Requirement;
use puffin_normalize::{ExtraName, PackageName};
use requirements_txt::{EditableRequirement, RequirementsTxt};
#[derive(Debug)]
pub(crate) enum RequirementsSource {
/// A package was provided on the command line (e.g., `pip install flask`).
Package(String),
/// An editable path was provided on the command line (e.g., `pip install -e ../flask`).
Editable(String),
/// Dependencies were provided via a `requirements.txt` file (e.g., `pip install -r requirements.txt`).
RequirementsTxt(PathBuf),
/// Dependencies were provided via a `pyproject.toml` file (e.g., `pip-compile pyproject.toml`).
PyprojectToml(PathBuf),
}
impl From<PathBuf> for RequirementsSource {
fn from(path: PathBuf) -> Self {
if path.ends_with("pyproject.toml") {
Self::PyprojectToml(path)
} else {
Self::RequirementsTxt(path)
}
}
}
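The conversion above classifies purely by file name; a small sketch of the resulting variants (paths are illustrative):

use std::path::PathBuf;

// `Path::ends_with` matches whole components, so any `.../pyproject.toml`
// becomes a project source; everything else is treated as a requirements file.
assert!(matches!(
    RequirementsSource::from(PathBuf::from("app/pyproject.toml")),
    RequirementsSource::PyprojectToml(_)
));
assert!(matches!(
    RequirementsSource::from(PathBuf::from("requirements-dev.txt")),
    RequirementsSource::RequirementsTxt(_)
));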
#[derive(Debug, Default, Clone)]
pub(crate) enum ExtrasSpecification<'a> {
#[default]
None,
All,
Some(&'a [ExtraName]),
}
impl ExtrasSpecification<'_> {
/// Returns true if a name is included in the extra specification.
fn contains(&self, name: &ExtraName) -> bool {
match self {
ExtrasSpecification::All => true,
ExtrasSpecification::None => false,
ExtrasSpecification::Some(extras) => extras.contains(name),
}
}
}
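A short sketch of the selection logic, assuming `ExtraName::new` accepts a `String` as it does elsewhere in this diff:

fn select() -> anyhow::Result<()> {
    let dev = ExtraName::new("dev".to_string())?;
    // `Some` only admits the listed extras; `None` admits nothing; `All` admits everything.
    let spec = ExtrasSpecification::Some(std::slice::from_ref(&dev));
    assert!(spec.contains(&dev));
    assert!(!ExtrasSpecification::None.contains(&dev));
    Ok(())
}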
#[derive(Debug, Default)]
pub(crate) struct RequirementsSpecification {
/// The name of the project specifying requirements.
pub(crate) project: Option<PackageName>,
/// The requirements for the project.
pub(crate) requirements: Vec<Requirement>,
/// The constraints for the project.
pub(crate) constraints: Vec<Requirement>,
/// The overrides for the project.
pub(crate) overrides: Vec<Requirement>,
/// Packages to install as editable installs.
pub(crate) editables: Vec<EditableRequirement>,
/// The extras used to collect requirements.
pub(crate) extras: FxHashSet<ExtraName>,
}
impl RequirementsSpecification {
/// Read the requirements and constraints from a source.
pub(crate) fn from_source(
source: &RequirementsSource,
extras: &ExtrasSpecification,
) -> Result<Self> {
Ok(match source {
RequirementsSource::Package(name) => {
let requirement = Requirement::from_str(name)
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
requirements: vec![requirement],
constraints: vec![],
overrides: vec![],
editables: vec![],
extras: FxHashSet::default(),
}
}
RequirementsSource::Editable(name) => {
let requirement = EditableRequirement::from_str(name)
.with_context(|| format!("Failed to parse `{name}`"))?;
Self {
project: None,
requirements: vec![],
constraints: vec![],
overrides: vec![],
editables: vec![requirement],
extras: FxHashSet::default(),
}
}
RequirementsSource::RequirementsTxt(path) => {
let requirements_txt = RequirementsTxt::parse(path, std::env::current_dir()?)?;
Self {
project: None,
requirements: requirements_txt
.requirements
.into_iter()
.map(|entry| entry.requirement)
.collect(),
constraints: requirements_txt.constraints,
editables: requirements_txt.editables,
overrides: vec![],
extras: FxHashSet::default(),
}
}
RequirementsSource::PyprojectToml(path) => {
let contents = fs::read_to_string(path)?;
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
.with_context(|| format!("Failed to parse `{}`", path.display()))?;
let mut used_extras = FxHashSet::default();
let mut requirements = Vec::new();
let mut project_name = None;
if let Some(project) = pyproject_toml.project {
requirements.extend(project.dependencies.unwrap_or_default());
// Include any optional dependencies specified in `extras`
if !matches!(extras, ExtrasSpecification::None) {
for (name, optional_requirements) in
project.optional_dependencies.unwrap_or_default()
{
// TODO(konstin): It's not ideal that pyproject-toml doesn't use
// `ExtraName`
let normalized_name = ExtraName::new(name)?;
if extras.contains(&normalized_name) {
used_extras.insert(normalized_name);
requirements.extend(optional_requirements);
}
}
}
// Parse the project name
project_name = Some(PackageName::new(project.name).with_context(|| {
format!("Invalid `project.name` in {}", path.display())
})?);
}
Self {
project: project_name,
requirements,
constraints: vec![],
overrides: vec![],
editables: vec![],
extras: used_extras,
}
}
})
}
/// Read the combined requirements and constraints from a set of sources.
pub(crate) fn from_sources(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification,
) -> Result<Self> {
let mut spec = Self::default();
// Read all requirements, and keep track of all requirements _and_ constraints.
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
// a requirements file can also add constraints.
for source in requirements {
let source = Self::from_source(source, extras)?;
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
spec.extras.extend(source.extras);
spec.editables.extend(source.editables);
// Use the first project name discovered
if spec.project.is_none() {
spec.project = source.project;
}
}
// Read all constraints, treating _everything_ as a constraint.
for source in constraints {
let source = Self::from_source(source, extras)?;
spec.constraints.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.constraints.extend(source.overrides);
}
// Read all overrides, treating both requirements _and_ constraints as overrides.
for source in overrides {
let source = Self::from_source(source, extras)?;
spec.overrides.extend(source.requirements);
spec.overrides.extend(source.constraints);
spec.overrides.extend(source.overrides);
}
Ok(spec)
}
/// Read the requirements and editables from a set of sources.
pub(crate) fn requirements_and_editables(
requirements: &[RequirementsSource],
) -> Result<(Vec<Requirement>, Vec<EditableRequirement>)> {
let specification = Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None)?;
Ok((specification.requirements, specification.editables))
}
}
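A sketch of the merge semantics above (file names are illustrative): requirements files contribute to both `requirements` and, via any `-c` directives they contain, to `constraints`, while files passed as constraints are folded into `constraints` wholesale:

use std::path::PathBuf;

fn collect() -> anyhow::Result<()> {
    let spec = RequirementsSpecification::from_sources(
        &[RequirementsSource::from(PathBuf::from("requirements.txt"))],
        &[RequirementsSource::from(PathBuf::from("constraints.txt"))],
        &[],
        &ExtrasSpecification::None,
    )?;
    // No override files were supplied, so none are collected.
    assert!(spec.overrides.is_empty());
    Ok(())
}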

View File

@@ -1,169 +0,0 @@
use std::process::Command;
use anyhow::Result;
use assert_fs::prelude::*;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::BIN_NAME;
mod common;
#[test]
fn missing_pyproject_toml() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("add")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
puffin::add::workspace_not_found
× Could not find a `pyproject.toml` file in the current directory or any of
its parents
"###);
pyproject_toml.assert(predicates::path::missing());
Ok(())
}
#[test]
fn missing_project_table() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("add")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
dependencies = [
"flask",
]
"#,
);
Ok(())
}
#[test]
fn missing_dependencies_array() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("add")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"flask",
]
"#,
);
Ok(())
}
#[test]
fn replace_dependency() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("add")
.arg("flask==2.0.0")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"flask==2.0.0",
]
"#,
);
Ok(())
}
#[test]
fn reformat_array() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = ["flask==1.0.0"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("add")
.arg("requests")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
"requests",
]
"#,
);
Ok(())
}

View File

@@ -1,39 +0,0 @@
#![allow(dead_code)]
use assert_cmd::Command;
use assert_fs::assert::PathAssert;
use assert_fs::fixture::PathChild;
use assert_fs::TempDir;
use insta_cmd::get_cargo_bin;
use std::path::PathBuf;
pub(crate) const BIN_NAME: &str = "puffin";
pub(crate) const INSTA_FILTERS: &[(&str, &str)] = &[
(r"--cache-dir .*", "--cache-dir [CACHE_DIR]"),
(r"(\d+\.)?\d+(ms|s)", "[TIME]"),
(r"v\d+\.\d+\.\d+", "v[VERSION]"),
];
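These filters normalize volatile output before snapshot comparison; for instance (the sample line is illustrative):

// "Resolved 7 packages in 102ms (puffin v0.1.0)"
// is normalized to
// "Resolved 7 packages in [TIME] (puffin v[VERSION])"
// before comparison against the inline snapshot.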
/// Create a virtual environment named `.venv` in a temporary directory.
pub(crate) fn create_venv_py312(temp_dir: &TempDir, cache_dir: &TempDir) -> PathBuf {
create_venv(temp_dir, cache_dir, "python3.12")
}
/// Create a virtual environment named `.venv` in a temporary directory with the given
/// Python version. Expected format for `python` is "python<version>".
pub(crate) fn create_venv(temp_dir: &TempDir, cache_dir: &TempDir, python: &str) -> PathBuf {
let venv = temp_dir.child(".venv");
Command::new(get_cargo_bin(BIN_NAME))
.arg("venv")
.arg(venv.as_os_str())
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--python")
.arg(python)
.current_dir(temp_dir)
.assert()
.success();
venv.assert(predicates::path::is_dir());
venv.to_path_buf()
}
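A sketch of the helper in use from a test body returning `anyhow::Result<()>` (error handling elided):

let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv(&temp_dir, &cache_dir, "python3.12");
// The returned path points at the `.venv` directory inside `temp_dir`.
assert!(venv.ends_with(".venv"));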

File diff suppressed because it is too large

View File

@@ -1,942 +0,0 @@
#![cfg(all(feature = "python", feature = "pypi"))]
use std::iter;
use std::path::Path;
use std::process::Command;
use anyhow::Result;
use assert_cmd::assert::Assert;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use indoc::indoc;
use insta_cmd::_macro_support::insta;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::{create_venv_py312, BIN_NAME, INSTA_FILTERS};
mod common;
// Exclude any packages uploaded after this date.
static EXCLUDE_NEWER: &str = "2023-11-18T12:00:00Z";
fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert {
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg(command)
.current_dir(temp_dir)
.assert()
}
#[test]
fn missing_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###);
requirements_txt.assert(predicates::path::missing());
Ok(())
}
#[test]
fn no_solution() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("flask>=3.0.0")
.arg("WerkZeug<1.0.0")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
Because only flask<=3.0.0 is available and flask==3.0.0 depends
on werkzeug>=3.0.0, we can conclude that flask>=3.0.0 depends on
werkzeug>=3.0.0.
And because you require flask>=3.0.0 and you require werkzeug<1.0.0, we
can conclude that the requirements are unsatisfiable.
"###);
Ok(())
}
/// Install a package from the command line into a virtual environment.
#[test]
fn install_package() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask.
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
Ok(())
}
/// Install a package from a `requirements.txt` into a virtual environment.
#[test]
fn install_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str("Flask")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// Install Jinja2 (which should already be installed, but shouldn't remove other packages).
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str("Jinja2")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
Ok(())
}
/// Respect installed versions when resolving.
#[test]
fn respect_installed() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("Flask==2.3.2")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==2.3.2
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// Re-install Flask. We should respect the existing version.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("Flask")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// Install a newer version of Flask. We should upgrade it.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("Flask==2.3.3")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
- flask==2.3.2
+ flask==2.3.3
"###);
});
// Re-install Flask. We should upgrade it.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("Flask")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--reinstall-package")
.arg("Flask")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
- flask==2.3.3
+ flask==3.0.0
"###);
});
Ok(())
}
/// Like `pip`, we (unfortunately) allow incompatible environments.
#[test]
fn allow_incompatibilities() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask, which relies on `Werkzeug>=3.0.0`.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("Flask")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// Install an incompatible version of Jinja2.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("jinja2==2.11.3")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
- jinja2==3.1.2
+ jinja2==2.11.3
warning: The package `flask` requires `jinja2 >=3.1.2`, but `2.11.3` is installed.
"###);
});
// This no longer works, since we have an incompatible version of Jinja2.
assert_command(&venv, "import flask", &temp_dir).failure();
Ok(())
}
#[test]
#[cfg(feature = "maturin")]
fn install_editable() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let current_dir = std::env::current_dir()?;
let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
let filters = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
.chain(INSTA_FILTERS.to_vec())
.collect::<Vec<_>>();
// Install the editable package.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-e")
.arg("../../scripts/editable-installs/poetry_editable")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 2 packages in [TIME]
+ numpy==1.26.2
+ poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
"###);
});
// Install it again (no-op).
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-e")
.arg("../../scripts/editable-installs/poetry_editable")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
});
// Add another, non-editable dependency.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-e")
.arg("../../scripts/editable-installs/poetry_editable")
.arg("black")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Built 1 editable in [TIME]
Resolved 8 packages in [TIME]
Downloaded 6 packages in [TIME]
Installed 7 packages in [TIME]
+ black==23.11.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==23.2
+ pathspec==0.11.2
+ platformdirs==4.0.0
- poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
+ poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
"###);
});
Ok(())
}
#[test]
fn install_editable_and_registry() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let current_dir = std::env::current_dir()?;
let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
.chain(INSTA_FILTERS.to_vec())
.collect();
// Install the registry-based version of Black.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("black")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Downloaded 6 packages in [TIME]
Installed 6 packages in [TIME]
+ black==23.11.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==23.2
+ pathspec==0.11.2
+ platformdirs==4.0.0
"###);
});
// Install the editable version of Black. This should remove the registry-based version.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("-e")
.arg("../../scripts/editable-installs/black_editable")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Installed 1 package in [TIME]
- black==23.11.0
+ black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable)
"###);
});
// Re-install the registry-based version of Black. This should be a no-op, since we have a
// version of Black installed (the editable version) that satisfies the requirements.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("black")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
});
// Re-install Black at a specific version. This should replace the editable version.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("black==23.10.0")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
- black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable)
+ black==23.10.0
"###);
});
Ok(())
}
/// Install a source distribution that uses the `flit` build system, with `flit`
/// itself as a top-level requirement and `--reinstall` to force a re-download after
/// resolution, ensuring that the `flit` install and the source distribution build don't conflict.
#[test]
fn reinstall_build_system() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask from its source distribution, alongside a pinned `flit_core`.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
flit_core<4.0.0
flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz
"
})?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("--reinstall")
.arg("-r")
.arg("requirements.txt")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 8 packages in [TIME]
Downloaded 8 packages in [TIME]
Installed 8 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0 (from https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz)
+ flit-core==3.9.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
Ok(())
}
/// Install a package without using pre-built wheels.
#[test]
fn install_no_binary() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--no-binary")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
Ok(())
}
/// Install a package, without using pre-built wheels for a subset of its dependencies.
#[test]
fn install_no_binary_subset() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--no-binary-package")
.arg("click")
.arg("--no-binary-package")
.arg("flask")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
Ok(())
}
/// Re-install a package without using pre-built wheels.
#[test]
fn reinstall_no_binary() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// The first installation should use a pre-built wheel
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// Running installation again with `--no-binary` should be a no-op
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--no-binary")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
// With `--reinstall`, `--no-binary` should have an effect
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("install")
.arg("Flask")
.arg("--no-binary")
.arg("--reinstall-package")
.arg("Flask")
.arg("--strict")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
- flask==3.0.0
+ flask==3.0.0
"###);
});
assert_command(&venv, "import flask", &temp_dir).success();
Ok(())
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,546 +0,0 @@
use std::iter;
use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::{BIN_NAME, INSTA_FILTERS};
use crate::common::create_venv_py312;
mod common;
#[test]
fn no_arguments() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the following required arguments were not provided:
<PACKAGE|--requirement <REQUIREMENT>|--editable <EDITABLE>>
Usage: puffin pip uninstall <PACKAGE|--requirement <REQUIREMENT>|--editable <EDITABLE>>
For more information, try '--help'.
"###);
Ok(())
}
#[test]
fn invalid_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("flask==1.0.x")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `flask==1.0.x`
Caused by: after parsing 1.0, found ".x" after it, which is not part of a valid version
flask==1.0.x
^^^^^^^
"###);
Ok(())
}
#[test]
fn missing_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("requirements.txt")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###);
Ok(())
}
#[test]
fn invalid_requirements_txt_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("flask==1.0.x")?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("requirements.txt")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Couldn't parse requirement in requirements.txt position 0 to 12
Caused by: after parsing 1.0, found ".x" after it, which is not part of a valid version
flask==1.0.x
^^^^^^^
"###);
Ok(())
}
#[test]
fn missing_pyproject_toml() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("pyproject.toml")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to open file `pyproject.toml`
Caused by: No such file or directory (os error 2)
"###);
Ok(())
}
#[test]
fn invalid_pyproject_toml_syntax() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str("123 - 456")?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("pyproject.toml")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `pyproject.toml`
Caused by: TOML parse error at line 1, column 5
|
1 | 123 - 456
| ^
expected `.`, `=`
"###);
Ok(())
}
#[test]
fn invalid_pyproject_toml_schema() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str("[project]")?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("pyproject.toml")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `pyproject.toml`
Caused by: TOML parse error at line 1, column 1
|
1 | [project]
| ^^^^^^^^^
missing field `name`
"###);
Ok(())
}
#[test]
fn invalid_pyproject_toml_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = ["flask==1.0.x"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("pyproject.toml")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `pyproject.toml`
Caused by: TOML parse error at line 3, column 16
|
3 | dependencies = ["flask==1.0.x"]
| ^^^^^^^^^^^^^^^^
after parsing 1.0, found ".x" after it, which is not part of a valid version
flask==1.0.x
^^^^^^^
"###);
Ok(())
}
#[test]
fn uninstall() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("MarkupSafe==2.1.3")?;
Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("sync")
.arg("requirements.txt")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir)
.assert()
.success();
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import markupsafe")
.current_dir(&temp_dir)
.assert()
.success();
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("MarkupSafe")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- markupsafe==2.1.3
"###);
});
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import markupsafe")
.current_dir(&temp_dir)
.assert()
.failure();
Ok(())
}
#[test]
fn missing_record() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("MarkupSafe==2.1.3")?;
Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("sync")
.arg("requirements.txt")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir)
.assert()
.success();
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import markupsafe")
.current_dir(&temp_dir)
.assert()
.success();
// Delete the RECORD file.
let dist_info = venv
.join("lib")
.join("python3.12")
.join("site-packages")
.join("MarkupSafe-2.1.3.dist-info");
std::fs::remove_file(dist_info.join("RECORD"))?;
let filters: Vec<_> = iter::once((
"RECORD file not found at: .*/.venv",
"RECORD file not found at: [VENV_PATH]",
))
.chain(INSTA_FILTERS.to_vec())
.collect();
insta::with_settings!({
filters => filters,
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("MarkupSafe")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Cannot uninstall package; RECORD file not found at: [VENV_PATH]/lib/python3.12/site-packages/MarkupSafe-2.1.3.dist-info/RECORD
"###);
});
Ok(())
}
#[test]
fn uninstall_editable_by_name() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let current_dir = std::env::current_dir()?;
let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
.chain(INSTA_FILTERS.to_vec())
.collect();
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?;
Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("sync")
.arg(requirements_txt.path())
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.assert()
.success();
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.success();
// Uninstall the editable by name.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("poetry-editable")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
"###);
});
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.failure();
Ok(())
}
#[test]
fn uninstall_editable_by_path() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let current_dir = std::env::current_dir()?;
let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
.chain(INSTA_FILTERS.to_vec())
.collect();
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?;
Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("sync")
.arg(requirements_txt.path())
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.assert()
.success();
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.success();
// Uninstall the editable by path.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("-e")
.arg("../../scripts/editable-installs/poetry_editable")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
"###);
});
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.failure();
Ok(())
}
#[test]
fn uninstall_duplicate_editable() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let current_dir = std::env::current_dir()?;
let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
.chain(INSTA_FILTERS.to_vec())
.collect();
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.touch()?;
requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?;
Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("sync")
.arg(requirements_txt.path())
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.assert()
.success();
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.success();
// Uninstall the editable by both path and name.
insta::with_settings!({
filters => filters.clone()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip")
.arg("uninstall")
.arg("poetry-editable")
.arg("-e")
.arg("../../scripts/editable-installs/poetry_editable")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
"###);
});
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg("import poetry_editable")
.assert()
.failure();
Ok(())
}

View File

@@ -1,282 +0,0 @@
use std::process::Command;
use anyhow::Result;
use assert_fs::prelude::*;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::BIN_NAME;
mod common;
#[test]
fn missing_pyproject_toml() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
puffin::remove::workspace_not_found
× Could not find a `pyproject.toml` file in the current directory or any of
its parents
"###);
pyproject_toml.assert(predicates::path::missing());
Ok(())
}
#[test]
fn missing_project_table() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
puffin::remove::parse
× Failed to remove `flask` from `pyproject.toml`
no `[project]` table found in `pyproject.toml`
"###);
pyproject_toml.assert(predicates::str::is_empty());
Ok(())
}
#[test]
fn missing_dependencies_array() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
puffin::remove::parse
× Failed to remove `flask` from `pyproject.toml`
no `[project.dependencies]` array found in `pyproject.toml`
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
"#,
);
Ok(())
}
#[test]
fn missing_dependency() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("requests")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
puffin::remove::parse
× Failed to remove `requests` from `pyproject.toml`
unable to find package: `requests`
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
]
"#,
);
Ok(())
}
#[test]
fn remove_dependency() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
"requests",
]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("flask")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"requests",
]
"#,
);
Ok(())
}
#[test]
fn empty_array() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = [
"requests",
]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("requests")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = []
"#,
);
Ok(())
}
#[test]
fn normalize_name() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
"requests",
]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("Flask")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"requests",
]
"#,
);
Ok(())
}
#[test]
fn reformat_array() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let pyproject_toml = temp_dir.child("pyproject.toml");
pyproject_toml.touch()?;
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = ["flask==1.0.0", "requests"]
"#,
)?;
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("remove")
.arg("requests")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
pyproject_toml.assert(
r#"[project]
name = "project"
dependencies = [
"flask==1.0.0",
]
"#,
);
Ok(())
}

View File

@@ -1,111 +0,0 @@
#![cfg(feature = "python")]
use std::process::Command;
use anyhow::Result;
use assert_fs::prelude::*;
use insta_cmd::_macro_support::insta;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::BIN_NAME;
mod common;
#[test]
fn create_venv() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let venv = temp_dir.child(".venv");
insta::with_settings!({
filters => vec![
(r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"),
(temp_dir.to_str().unwrap(), "/home/ferris/project"),
]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("venv")
.arg(venv.as_os_str())
.arg("--python")
.arg("python3.12")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python [VERSION] at [PATH]
Creating virtual environment at: /home/ferris/project/.venv
"###);
});
venv.assert(predicates::path::is_dir());
Ok(())
}
#[test]
fn create_venv_defaults_to_cwd() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let venv = temp_dir.child(".venv");
insta::with_settings!({
filters => vec![
(r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"),
(temp_dir.to_str().unwrap(), "/home/ferris/project"),
]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("venv")
.arg("--python")
.arg("python3.12")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python [VERSION] at [PATH]
Creating virtual environment at: .venv
"###);
});
venv.assert(predicates::path::is_dir());
Ok(())
}
#[test]
fn seed() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let venv = temp_dir.child(".venv");
insta::with_settings!({
filters => vec![
(r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"),
(temp_dir.to_str().unwrap(), "/home/ferris/project"),
]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("venv")
.arg(venv.as_os_str())
.arg("--seed")
.arg("--python")
.arg("python3.12")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python [VERSION] at [PATH]
Creating virtual environment at: /home/ferris/project/.venv
+ setuptools==69.0.3
+ pip==23.3.2
+ wheel==0.42.0
"###);
});
venv.assert(predicates::path::is_dir());
Ok(())
}