Add a `pip-install` subcommand (#607)

## Summary

This PR adds a `pip-install` command that operates like, well, `pip
install`. In short, it resolves the provided dependencies, then makes sure
they're all installed in the environment. The primary differences from
`pip-sync` are that (1) `pip-sync` ignores dependencies, and assumes
that the provided packages represent a complete set; and (2) `pip-sync`
uninstalls any unlisted packages.
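
To make the difference concrete, here's a minimal, self-contained sketch
using stub functions (not the actual puffin APIs, and with resolution
stubbed out) of how the two commands treat an existing environment:

```rust
use std::collections::BTreeSet;

// Hypothetical stand-in for the real resolver: expand the requested packages
// into their full dependency closure. Stubbed as a no-op here.
fn resolve(requested: &BTreeSet<String>) -> BTreeSet<String> {
    requested.clone()
}

// `pip-install`-style: resolve, then install anything that's missing.
// Packages already in the environment but not listed are left alone.
fn pip_install(requested: &BTreeSet<String>, env: &mut BTreeSet<String>) {
    for package in resolve(requested) {
        env.insert(package);
    }
}

// `pip-sync`-style: no resolution; the input is assumed to be the complete
// set, and anything not listed is uninstalled.
fn pip_sync(requested: &BTreeSet<String>, env: &mut BTreeSet<String>) {
    env.retain(|package| requested.contains(package));
    for package in requested {
        env.insert(package.clone());
    }
}

fn main() {
    let requested: BTreeSet<String> = ["flask".to_string()].into();
    let mut env: BTreeSet<String> = ["numpy".to_string()].into();

    pip_install(&requested, &mut env);
    assert!(env.contains("numpy")); // unlisted package survives

    pip_sync(&requested, &mut env);
    assert!(!env.contains("numpy")); // unlisted package is removed
}
```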

There are a bunch of TODOs that I'll resolve in subsequent PRs.

Closes https://github.com/astral-sh/puffin/issues/129.
Charlie Marsh 2023-12-12 12:16:00 -05:00 committed by GitHub
parent fd5544bb69
commit 974cb4cc15
7 changed files with 811 additions and 55 deletions

View File

@@ -3,8 +3,10 @@ use std::time::Duration;
pub(crate) use add::add;
pub(crate) use clean::clean;
use distribution_types::Metadata;
pub(crate) use freeze::freeze;
pub(crate) use pip_compile::{extra_name_with_clap_error, pip_compile};
pub(crate) use pip_install::pip_install;
pub(crate) use pip_sync::pip_sync;
pub(crate) use pip_uninstall::pip_uninstall;
pub(crate) use remove::remove;
@@ -14,6 +16,7 @@ mod add;
mod clean;
mod freeze;
mod pip_compile;
mod pip_install;
mod pip_sync;
mod pip_uninstall;
mod remove;
@@ -57,3 +60,17 @@ pub(super) fn elapsed(duration: Duration) -> String {
format!("{}ms", duration.subsec_millis())
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(super) enum ChangeEventKind {
/// The package was removed from the environment.
Removed,
/// The package was added to the environment.
Added,
}
#[derive(Debug)]
pub(super) struct ChangeEvent<T: Metadata> {
dist: T,
kind: ChangeEventKind,
}

View File

@@ -0,0 +1,431 @@
use std::fmt::Write;
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use colored::Colorize;
use fs_err as fs;
use itertools::Itertools;
use tracing::debug;
use distribution_types::{AnyDist, Metadata};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::Requirement;
use platform_host::Platform;
use platform_tags::Tags;
use puffin_cache::Cache;
use puffin_client::RegistryClientBuilder;
use puffin_dispatch::BuildDispatch;
use puffin_installer::{Downloader, InstallPlan, Reinstall};
use puffin_interpreter::Virtualenv;
use puffin_resolver::{
Graph, Manifest, PreReleaseMode, ResolutionMode, ResolutionOptions, Resolver,
};
use puffin_traits::OnceMap;
use pypi_types::IndexUrls;
use crate::commands::reporters::{
DownloadReporter, FinderReporter, InstallReporter, ResolverReporter,
};
use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification};
/// Install packages into the current environment.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn pip_install(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
resolution_mode: ResolutionMode,
prerelease_mode: PreReleaseMode,
index_urls: IndexUrls,
reinstall: &Reinstall,
link_mode: LinkMode,
no_build: bool,
exclude_newer: Option<DateTime<Utc>>,
cache: Cache,
printer: Printer,
) -> Result<ExitStatus> {
miette::set_hook(Box::new(|_| {
Box::new(
miette::MietteHandlerOpts::new()
.break_words(false)
.word_separator(textwrap::WordSeparator::AsciiSpace)
.word_splitter(textwrap::WordSplitter::NoHyphenation)
.build(),
)
}))?;
// Determine the requirements.
let spec = specification(requirements, constraints, extras)?;
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = Virtualenv::from_env(platform, &cache)?;
debug!(
"Using Python interpreter: {}",
venv.python_executable().display()
);
// Resolve the requirements.
let resolution = resolve(
spec,
resolution_mode,
prerelease_mode,
&index_urls,
no_build,
exclude_newer,
&cache,
&venv,
printer,
)
.await?;
// Sync the environment.
install(
&resolution.requirements(),
reinstall,
link_mode,
index_urls,
no_build,
&cache,
&venv,
printer,
)
.await?;
Ok(ExitStatus::Success)
}
/// Consolidate the requirements for an installation.
fn specification(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
) -> Result<RequirementsSpecification> {
// If the user requests `extras` but does not provide a pyproject toml source
if !matches!(extras, ExtrasSpecification::None)
&& !requirements
.iter()
.any(|source| matches!(source, RequirementsSource::PyprojectToml(_)))
{
return Err(anyhow!(
"Requesting extras requires a pyproject.toml input file."
));
}
// Read all requirements from the provided sources.
let spec = RequirementsSpecification::try_from_sources(requirements, constraints, extras)?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !spec.extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
));
}
}
Ok(spec)
}
/// Resolve a set of requirements, similar to running `pip-compile`.
#[allow(clippy::too_many_arguments)]
async fn resolve(
spec: RequirementsSpecification,
resolution_mode: ResolutionMode,
prerelease_mode: PreReleaseMode,
index_urls: &IndexUrls,
no_build: bool,
exclude_newer: Option<DateTime<Utc>>,
cache: &Cache,
venv: &Virtualenv,
mut printer: Printer,
) -> Result<Graph> {
let start = std::time::Instant::now();
// Create a manifest of the requirements.
let RequirementsSpecification {
project,
requirements,
constraints,
extras: _,
} = spec;
let preferences = vec![];
let manifest = Manifest::new(requirements, constraints, preferences, project);
let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer);
debug!(
"Using Python {} at {}",
venv.interpreter().markers().python_version,
venv.python_executable().display()
);
// Determine the compatible platform tags.
let tags = Tags::from_interpreter(venv.interpreter())?;
// Determine the interpreter to use for resolution.
let interpreter = venv.interpreter().clone();
// Determine the markers to use for resolution.
let markers = venv.interpreter().markers();
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_urls.clone())
.build();
let build_dispatch = BuildDispatch::new(
client.clone(),
cache.clone(),
interpreter,
fs::canonicalize(venv.python_executable())?,
no_build,
index_urls.clone(),
)
.with_options(options);
// Resolve the dependencies.
let resolver = Resolver::new(manifest, options, markers, &tags, &client, &build_dispatch)
.with_reporter(ResolverReporter::from(printer));
let resolution = match resolver.resolve().await {
Err(puffin_resolver::ResolveError::PubGrub(err)) => {
#[allow(clippy::print_stderr)]
{
let report = miette::Report::msg(format!("{err}"))
.context("No solution found when resolving dependencies:");
eprint!("{report:?}");
}
return Err(puffin_resolver::ResolveError::PubGrub(err).into());
}
result => result,
}?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
Ok(resolution)
}
/// Install a set of requirements into the current environment.
#[allow(clippy::too_many_arguments)]
async fn install(
requirements: &[Requirement],
reinstall: &Reinstall,
link_mode: LinkMode,
index_urls: IndexUrls,
no_build: bool,
cache: &Cache,
venv: &Virtualenv,
mut printer: Printer,
) -> Result<()> {
let start = std::time::Instant::now();
// Determine the current environment markers.
let markers = venv.interpreter().markers();
let tags = Tags::from_interpreter(venv.interpreter())?;
// Partition into those that should be linked from the cache (`local`), those that need to be
// downloaded (`remote`), and those that should be removed (`extraneous`).
let InstallPlan {
local,
remote,
reinstalls,
extraneous: _,
} = InstallPlan::from_requirements(
requirements,
reinstall,
&index_urls,
cache,
venv,
markers,
&tags,
)
.context("Failed to determine installation plan")?;
// Nothing to do.
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() {
let s = if requirements.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", requirements.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(());
}
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_urls.clone())
.build();
// Resolve any registry-based requirements.
// TODO(charlie): We should be able to reuse the resolution from the `resolve` step. All the
// responses will be cached, so this isn't _terrible_, but it is wasteful. (Note that the
// distributions chosen when resolving won't necessarily be the same as those chosen by the
// `DistFinder`, since we allow the use of incompatible wheels when resolving, as long as a
// source distribution is present.)
let remote = if remote.is_empty() {
Vec::new()
} else {
let start = std::time::Instant::now();
let wheel_finder = puffin_resolver::DistFinder::new(&tags, &client, venv.interpreter())
.with_reporter(FinderReporter::from(printer).with_length(remote.len() as u64));
let resolution = wheel_finder.resolve(&remote).await?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
resolution.into_distributions().collect::<Vec<_>>()
};
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let start = std::time::Instant::now();
let build_dispatch = BuildDispatch::new(
client.clone(),
cache.clone(),
venv.interpreter().clone(),
fs::canonicalize(venv.python_executable())?,
no_build,
index_urls.clone(),
);
let downloader = Downloader::new(cache, &tags, &client, &build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader
.download(remote, &OnceMap::default())
.await
.context("Failed to download distributions")?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Downloaded {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
wheels
};
// Remove any existing installations.
if !reinstalls.is_empty() {
for dist_info in &reinstalls {
let summary = puffin_installer::uninstall(dist_info).await?;
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
puffin_installer::Installer::new(venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
for event in reinstalls
.into_iter()
.map(|distribution| ChangeEvent {
dist: AnyDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: AnyDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer,
" {} {}{}",
"+".green(),
event.dist.name().as_ref().white().bold(),
event.dist.version_or_url().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer,
" {} {}{}",
"-".red(),
event.dist.name().as_ref().white().bold(),
event.dist.version_or_url().to_string().dimmed()
)?;
}
}
}
Ok(())
}

View File

@@ -20,7 +20,7 @@ use puffin_traits::OnceMap;
use pypi_types::{IndexUrls, Yanked};
use crate::commands::reporters::{DownloadReporter, FinderReporter, InstallReporter};
use crate::commands::{elapsed, ExitStatus};
use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
use crate::printer::Printer;
use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification};
@@ -88,6 +88,7 @@ pub(crate) async fn sync_requirements(
let InstallPlan {
local,
remote,
reinstalls,
extraneous,
} = InstallPlan::from_requirements(
requirements,
@@ -101,7 +102,7 @@ pub(crate) async fn sync_requirements(
.context("Failed to determine installation plan")?;
// Nothing to do.
if remote.is_empty() && local.is_empty() && extraneous.is_empty() {
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
let s = if requirements.len() == 1 { "" } else { "s" };
writeln!(
printer,
@@ -213,10 +214,10 @@ pub(crate) async fn sync_requirements(
};
// Remove any unnecessary packages.
if !extraneous.is_empty() {
if !extraneous.is_empty() || !reinstalls.is_empty() {
let start = std::time::Instant::now();
for dist_info in &extraneous {
for dist_info in extraneous.iter().chain(reinstalls.iter()) {
let summary = puffin_installer::uninstall(dist_info).await?;
debug!(
"Uninstalled {} ({} file{}, {} director{})",
@@ -228,13 +229,17 @@ pub(crate) async fn sync_requirements(
);
}
let s = if extraneous.len() == 1 { "" } else { "s" };
let s = if extraneous.len() + reinstalls.len() == 1 {
""
} else {
"s"
};
writeln!(
printer,
"{}",
format!(
"Uninstalled {} in {}",
format!("{} package{}", extraneous.len(), s).bold(),
format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
@@ -265,13 +270,14 @@ pub(crate) async fn sync_requirements(
for event in extraneous
.into_iter()
.chain(reinstalls.into_iter())
.map(|distribution| ChangeEvent {
dist: AnyDist::from(distribution),
kind: ChangeEventKind::Remove,
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: AnyDist::from(distribution),
kind: ChangeEventKind::Add,
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
@@ -281,7 +287,7 @@ pub(crate) async fn sync_requirements(
})
{
match event.kind {
ChangeEventKind::Add => {
ChangeEventKind::Added => {
writeln!(
printer,
" {} {}{}",
@@ -290,7 +296,7 @@ pub(crate) async fn sync_requirements(
event.dist.version_or_url().to_string().dimmed()
)?;
}
ChangeEventKind::Remove => {
ChangeEventKind::Removed => {
writeln!(
printer,
" {} {}{}",
@@ -304,17 +310,3 @@ pub(crate) async fn sync_requirements(
Ok(ExitStatus::Success)
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
enum ChangeEventKind {
/// The package was removed from the environment.
Remove,
/// The package was added to the environment.
Add,
}
#[derive(Debug)]
struct ChangeEvent {
dist: AnyDist,
kind: ChangeEventKind,
}

View File

@@ -66,6 +66,8 @@ enum Commands {
PipCompile(PipCompileArgs),
/// Sync dependencies from a `requirements.txt` file.
PipSync(PipSyncArgs),
/// Install packages into the current environment.
PipInstall(PipInstallArgs),
/// Uninstall packages from the current environment.
PipUninstall(PipUninstallArgs),
/// Clear the cache.
@@ -145,8 +147,9 @@ struct PipCompileArgs {
/// Don't build source distributions.
///
/// This means resolving will not run arbitrary code. The cached wheels of already built source
/// distributions will be reused.
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
@@ -172,6 +175,7 @@ struct PipCompileArgs {
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipSyncArgs {
/// Include all packages listed in the given `requirements.txt` files.
#[clap(required(true))]
@@ -203,13 +207,98 @@ struct PipSyncArgs {
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Don't build source distributions. This means resolving will not run arbitrary code. The
/// cached wheels of already built source distributions will be reused.
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true))]
struct PipInstallArgs {
/// Install all listed packages.
#[clap(group = "sources")]
package: Vec<String>,
/// Install all packages listed in the given requirements files.
#[clap(short, long, group = "sources")]
requirement: Vec<PathBuf>,
/// Constrain versions using the given constraints files.
#[clap(short, long)]
constraint: Vec<PathBuf>,
/// Include optional dependencies in the given extra group name; may be provided more than once.
#[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
extra: Vec<ExtraName>,
/// Include all optional dependencies.
#[clap(long, conflicts_with = "extra")]
all_extras: bool,
/// Reinstall all packages, overwriting any entries in the cache and replacing any existing
/// packages in the environment.
#[clap(long)]
reinstall: bool,
/// Reinstall a specific package, overwriting any entries in the cache and replacing any
/// existing versions in the environment.
#[clap(long)]
reinstall_package: Vec<PackageName>,
/// The method to use when installing packages from the global cache.
#[clap(long, value_enum)]
link_mode: Option<install_wheel_rs::linker::LinkMode>,
#[clap(long, value_enum)]
resolution: Option<ResolutionMode>,
#[clap(long, value_enum)]
prerelease: Option<PreReleaseMode>,
/// Write the compiled requirements to the given `requirements.txt` file.
#[clap(short, long)]
output_file: Option<PathBuf>,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str())]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
/// source distributions will be reused, but operations that require building distributions will
/// exit with an error.
#[clap(long)]
no_build: bool,
/// Try to resolve at a past time.
///
/// This works by filtering out files with a more recent upload time, so if the index you use
/// does not provide upload times, the results might be inaccurate. pypi provides upload times
/// for all files.
///
/// Timestamps are given either as RFC 3339 timestamps such as `2006-12-02T02:07:43Z` or as
/// UTC dates in the same format such as `2006-12-02`. Dates are interpreted as including this
/// day, i.e. until midnight UTC that day.
#[arg(long, value_parser = date_or_datetime)]
exclude_newer: Option<DateTime<Utc>>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true))]
struct PipUninstallArgs {
/// Uninstall all listed packages.
@@ -222,12 +311,14 @@ struct PipUninstallArgs {
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct CleanArgs {
/// The packages to remove from the cache.
package: Vec<PackageName>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct VenvArgs {
/// The Python interpreter to use for the virtual environment.
// Short `-p` to match `virtualenv`
@@ -241,12 +332,14 @@ struct VenvArgs {
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct AddArgs {
/// The name of the package to add (e.g., `Django==4.2.6`).
name: String,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct RemoveArgs {
/// The name of the package to remove (e.g., `Django`).
name: PackageName,
@@ -285,7 +378,6 @@ async fn inner() -> Result<ExitStatus> {
.collect::<Vec<_>>();
let index_urls =
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
let extras = if args.all_extras {
ExtrasSpecification::All
} else if args.extra.is_empty() {
@@ -293,7 +385,6 @@ async fn inner() -> Result<ExitStatus> {
} else {
ExtrasSpecification::Some(&args.extra)
};
commands::pip_compile(
&requirements,
&constraints,
@@ -331,6 +422,44 @@ async fn inner() -> Result<ExitStatus> {
)
.await
}
Commands::PipInstall(args) => {
let requirements = args
.package
.into_iter()
.map(RequirementsSource::from)
.chain(args.requirement.into_iter().map(RequirementsSource::from))
.collect::<Vec<_>>();
let constraints = args
.constraint
.into_iter()
.map(RequirementsSource::from)
.collect::<Vec<_>>();
let index_urls =
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
let extras = if args.all_extras {
ExtrasSpecification::All
} else if args.extra.is_empty() {
ExtrasSpecification::None
} else {
ExtrasSpecification::Some(&args.extra)
};
let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package);
commands::pip_install(
&requirements,
&constraints,
&extras,
args.resolution.unwrap_or_default(),
args.prerelease.unwrap_or_default(),
index_urls,
&reinstall,
args.link_mode.unwrap_or_default(),
args.no_build,
args.exclude_newer,
cache,
printer,
)
.await
}
Commands::PipUninstall(args) => {
let sources = args
.package

View File

@@ -0,0 +1,174 @@
#![cfg(all(feature = "python", feature = "pypi"))]
use std::path::Path;
use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use insta_cmd::_macro_support::insta;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use common::{create_venv_py312, BIN_NAME, INSTA_FILTERS};
mod common;
// Exclude any packages uploaded after this date.
static EXCLUDE_NEWER: &str = "2023-11-18T12:00:00Z";
fn check_command(venv: &Path, command: &str, temp_dir: &Path) {
Command::new(venv.join("bin").join("python"))
.arg("-c")
.arg(command)
.current_dir(temp_dir)
.assert()
.success();
}
#[test]
fn missing_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-install")
.arg("-r")
.arg("requirements.txt")
.arg("--cache-dir")
.arg(cache_dir.path())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###);
requirements_txt.assert(predicates::path::missing());
Ok(())
}
/// Install a package from the command line into a virtual environment.
#[test]
fn install_package() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask.
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-install")
.arg("Flask")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
check_command(&venv, "import flask", &temp_dir);
Ok(())
}
/// Install a package from a `requirements.txt` into a virtual environment.
#[test]
fn install_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
// Install Flask.
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str("Flask")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-install")
.arg("-r")
.arg("requirements.txt")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Resolved 7 packages in [TIME]
Downloaded 7 packages in [TIME]
Installed 7 packages in [TIME]
+ blinker==1.7.0
+ click==8.1.7
+ flask==3.0.0
+ itsdangerous==2.1.2
+ jinja2==3.1.2
+ markupsafe==2.1.3
+ werkzeug==3.0.1
"###);
});
check_command(&venv, "import flask", &temp_dir);
// Install Jinja2 (which should already be installed, but shouldn't remove other packages).
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str("Jinja2")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-install")
.arg("-r")
.arg("requirements.txt")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Audited 2 packages in [TIME]
"###);
});
check_command(&venv, "import flask", &temp_dir);
Ok(())
}

View File

@@ -132,6 +132,7 @@ impl BuildContext for BuildDispatch {
let InstallPlan {
local,
remote,
reinstalls,
extraneous,
} = InstallPlan::from_requirements(
requirements,
@@ -178,8 +179,8 @@ impl BuildContext for BuildDispatch {
};
// Remove any unnecessary packages.
if !extraneous.is_empty() {
for dist_info in &extraneous {
if !extraneous.is_empty() || !reinstalls.is_empty() {
for dist_info in extraneous.iter().chain(reinstalls.iter()) {
let summary = puffin_installer::uninstall(dist_info)
.await
.context("Failed to uninstall build dependencies")?;

View File

@@ -28,7 +28,11 @@ pub struct InstallPlan {
/// not available in the local cache.
pub remote: Vec<Requirement>,
/// The distributions that are already installed in the current environment, and are
/// Any distributions that are already installed in the current environment, but will be
/// re-installed (including upgraded) to satisfy the requirements.
pub reinstalls: Vec<InstalledDist>,
/// Any distributions that are already installed in the current environment, and are
/// _not_ necessary to satisfy the requirements.
pub extraneous: Vec<InstalledDist>,
}
@@ -54,6 +58,7 @@ impl InstallPlan {
let mut local = vec![];
let mut remote = vec![];
let mut reinstalls = vec![];
let mut extraneous = vec![];
let mut seen =
FxHashMap::with_capacity_and_hasher(requirements.len(), BuildHasherDefault::default());
@@ -81,36 +86,42 @@ impl InstallPlan {
// If necessary, purge the cached distributions.
debug!("Purging cached distributions for: {requirement}");
cache.purge(&requirement.name)?;
} else if let Some(distribution) = site_packages.remove(&requirement.name) {
// Filter out already-installed packages.
match requirement.version_or_url.as_ref() {
// If the requirement comes from a registry, check by name.
None | Some(VersionOrUrl::VersionSpecifier(_)) => {
if requirement.is_satisfied_by(distribution.version()) {
debug!("Requirement already satisfied: {distribution}");
continue;
if let Some(distribution) = site_packages.remove(&requirement.name) {
reinstalls.push(distribution);
}
} else {
if let Some(distribution) = site_packages.remove(&requirement.name) {
// Filter out already-installed packages.
match requirement.version_or_url.as_ref() {
// If the requirement comes from a registry, check by name.
None | Some(VersionOrUrl::VersionSpecifier(_)) => {
if requirement.is_satisfied_by(distribution.version()) {
debug!("Requirement already satisfied: {distribution}");
continue;
}
}
}
// If the requirement comes from a direct URL, check by URL.
Some(VersionOrUrl::Url(url)) => {
if let InstalledDist::Url(distribution) = &distribution {
if let Ok(direct_url) = DirectUrl::try_from(url) {
if let Ok(direct_url) = pypi_types::DirectUrl::try_from(&direct_url)
{
// TODO(charlie): These don't need to be strictly equal. We only care
// about a subset of the fields.
if direct_url == distribution.url {
debug!("Requirement already satisfied: {distribution}");
continue;
// If the requirement comes from a direct URL, check by URL.
Some(VersionOrUrl::Url(url)) => {
if let InstalledDist::Url(distribution) = &distribution {
if let Ok(direct_url) = DirectUrl::try_from(url) {
if let Ok(direct_url) =
pypi_types::DirectUrl::try_from(&direct_url)
{
// TODO(charlie): These don't need to be strictly equal. We only care
// about a subset of the fields.
if direct_url == distribution.url {
debug!("Requirement already satisfied: {distribution}");
continue;
}
}
}
}
}
}
}
extraneous.push(distribution);
reinstalls.push(distribution);
}
}
// Identify any locally-available distributions that satisfy the requirement.
@@ -267,6 +278,7 @@ impl InstallPlan {
Ok(InstallPlan {
local,
remote,
reinstalls,
extraneous,
})
}