//! Mirror of <https://github.com/astral-sh/uv>

use std::path::PathBuf;
use std::process::ExitCode;
use std::str::FromStr;

use anstream::eprintln;
use anyhow::Result;
use chrono::{DateTime, Days, NaiveDate, NaiveTime, Utc};
use clap::{Args, Parser, Subcommand};
use owo_colors::OwoColorize;

use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl};
use puffin_cache::{Cache, CacheArgs};
use puffin_installer::Reinstall;
use puffin_interpreter::PythonVersion;
use puffin_normalize::{ExtraName, PackageName};
use puffin_resolver::{PreReleaseMode, ResolutionMode};
use puffin_traits::SetupPyStrategy;
use requirements::ExtrasSpecification;

use crate::commands::{extra_name_with_clap_error, ExitStatus};
use crate::requirements::RequirementsSource;

#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

mod commands;
mod logging;
mod printer;
mod requirements;

#[derive(Parser)]
#[command(author, version, about)]
#[command(propagate_version = true)]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Do not print any output.
    #[arg(global = true, long, short, conflicts_with = "verbose")]
    quiet: bool,

    /// Use verbose output.
    #[arg(global = true, long, short, conflicts_with = "quiet")]
    verbose: bool,

    #[command(flatten)]
    cache_args: CacheArgs,
}

#[derive(Subcommand)]
#[allow(clippy::large_enum_variant)]
enum Commands {
    /// Resolve and install Python packages.
    Pip(PipArgs),
    /// Create a virtual environment.
    #[clap(alias = "virtualenv", alias = "v")]
    Venv(VenvArgs),
    /// Clear the cache.
    Clean(CleanArgs),
    /// Add a dependency to the workspace.
    #[clap(hide = true)]
    Add(AddArgs),
    /// Remove a dependency from the workspace.
    #[clap(hide = true)]
    Remove(RemoveArgs),
}

#[derive(Args)]
struct PipArgs {
    #[clap(subcommand)]
    command: PipCommand,
}

#[derive(Subcommand)]
enum PipCommand {
    /// Compile a `requirements.in` file to a `requirements.txt` file.
    Compile(PipCompileArgs),
    /// Sync dependencies from a `requirements.txt` file.
    Sync(PipSyncArgs),
    /// Install packages into the current environment.
    Install(PipInstallArgs),
    /// Uninstall packages from the current environment.
    Uninstall(PipUninstallArgs),
    /// Enumerate the installed packages in the current environment.
    Freeze(PipFreezeArgs),
}

/// Clap parser for the union of date and datetime
fn date_or_datetime(input: &str) -> Result<DateTime<Utc>, String> {
    let date_err = match NaiveDate::from_str(input) {
        Ok(date) => {
            // Dates are inclusive, so a bare date maps to 00:00:00 UTC on the following day.
            return Ok((date + Days::new(1)).and_time(NaiveTime::MIN).and_utc());
        }
        Err(err) => err,
    };
    let datetime_err = match DateTime::parse_from_rfc3339(input) {
        Ok(datetime) => return Ok(datetime.with_timezone(&Utc)),
        Err(err) => err,
    };
    Err(format!(
        "Neither a valid date ({date_err}) nor a valid datetime ({datetime_err})"
    ))
}

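// A minimal sketch of how `date_or_datetime` could be exercised; the cases below are
// illustrative, and the expected values mirror the parser's own construction.
#[cfg(test)]
mod date_or_datetime_tests {
    use super::*;

    #[test]
    fn bare_date_includes_the_whole_day() {
        // A bare date is inclusive, so it parses to 00:00:00 UTC on the following day.
        let parsed = date_or_datetime("2006-12-02").unwrap();
        let expected = NaiveDate::from_ymd_opt(2006, 12, 3)
            .unwrap()
            .and_time(NaiveTime::MIN)
            .and_utc();
        assert_eq!(parsed, expected);
    }

    #[test]
    fn rfc3339_timestamp_is_normalized_to_utc() {
        // RFC 3339 timestamps are accepted directly and converted to UTC.
        let parsed = date_or_datetime("2006-12-02T02:07:43Z").unwrap();
        let expected = DateTime::parse_from_rfc3339("2006-12-02T02:07:43Z")
            .unwrap()
            .with_timezone(&Utc);
        assert_eq!(parsed, expected);
    }

    #[test]
    fn other_input_is_rejected() {
        assert!(date_or_datetime("not-a-date").is_err());
    }
}
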
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipCompileArgs {
    /// Include all packages listed in the given `requirements.in` files.
    #[clap(required(true))]
    src_file: Vec<PathBuf>,

    /// Constrain versions using the given requirements files.
    ///
    /// Constraints files are `requirements.txt`-like files that only control the _version_ of a
    /// requirement that's installed. However, including a package in a constraints file will _not_
    /// trigger the installation of that package.
    ///
    /// This is equivalent to pip's `--constraint` option.
    #[clap(short, long)]
    constraint: Vec<PathBuf>,

    /// Override versions using the given requirements files.
    ///
    /// Overrides files are `requirements.txt`-like files that force a specific version of a
    /// requirement to be installed, regardless of the requirements declared by any constituent
    /// package, and regardless of whether this would be considered an invalid resolution.
    ///
    /// While constraints are _additive_, in that they're combined with the requirements of the
    /// constituent packages, overrides are _absolute_, in that they completely replace the
    /// requirements of the constituent packages.
    #[clap(long)]
    r#override: Vec<PathBuf>,

    /// Include optional dependencies in the given extra group name; may be provided more than once.
    #[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    extra: Vec<ExtraName>,

    /// Include all optional dependencies.
    #[clap(long, conflicts_with = "extra")]
    all_extras: bool,

    #[clap(long, value_enum, default_value_t = ResolutionMode::default())]
    resolution: ResolutionMode,

    #[clap(long, value_enum, default_value_t = PreReleaseMode::default())]
    prerelease: PreReleaseMode,

    /// Write the compiled requirements to the given `requirements.txt` file.
    #[clap(short, long)]
    output_file: Option<PathBuf>,

    /// The URL of the Python Package Index.
    #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
    index_url: IndexUrl,

    /// Extra URLs of package indexes to use, in addition to `--index-url`.
    #[clap(long)]
    extra_index_url: Vec<IndexUrl>,

    /// Locations to search for candidate distributions, beyond those found in the indexes.
    ///
    /// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
    /// source distributions (`.tar.gz` or `.zip`) at the top level.
    ///
    /// If a URL, the page must contain a flat list of links to package files.
    #[clap(long)]
    find_links: Vec<FlatIndexLocation>,

    /// Ignore the package index, instead relying on local archives and caches.
    #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
    no_index: bool,

    /// Allow package upgrades, ignoring pinned versions in the existing output file.
    #[clap(long)]
    upgrade: bool,

    /// Include distribution hashes in the output file.
    #[clap(long)]
    generate_hashes: bool,

    /// Use legacy `setuptools` behavior when building source distributions without a
    /// `pyproject.toml`.
    #[clap(long)]
    legacy_setup_py: bool,

    /// Don't build source distributions.
    ///
    /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
    /// source distributions will be reused, but operations that require building distributions
    /// will exit with an error.
    #[clap(long)]
    no_build: bool,

    /// The minimum Python version that should be supported by the compiled requirements (e.g.,
    /// `3.7` or `3.7.9`).
    ///
    /// If a patch version is omitted, the most recent known patch version for that minor version
    /// is assumed. For example, `3.7` is mapped to `3.7.17`.
    #[arg(long, short)]
    python_version: Option<PythonVersion>,

    /// Try to resolve at a past point in time.
    ///
    /// This works by filtering out files with a more recent upload time, so if the index you use
    /// does not provide upload times, the results might be inaccurate. PyPI provides upload times
    /// for all files.
    ///
    /// Timestamps are given either as RFC 3339 timestamps, such as `2006-12-02T02:07:43Z`, or as
    /// UTC dates in the same format, such as `2006-12-02`. Dates are interpreted as including
    /// that entire day, i.e. until midnight UTC at the end of that day.
    #[arg(long, value_parser = date_or_datetime)]
    exclude_newer: Option<DateTime<Utc>>,
}

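// A minimal sketch of how the derived CLI could be exercised end-to-end via clap's
// `try_parse_from`. The invocation is illustrative: "puffin" is just a placeholder for argv[0],
// and the `dev` extra assumes `extra_name_with_clap_error` accepts that name.
#[cfg(test)]
mod pip_compile_cli_tests {
    use super::*;

    #[test]
    fn parses_pip_compile_arguments() {
        let cli = Cli::try_parse_from([
            "puffin",
            "pip",
            "compile",
            "requirements.in",
            "--extra",
            "dev",
            "--output-file",
            "requirements.txt",
        ])
        .expect("arguments should parse");
        match cli.command {
            Commands::Pip(PipArgs {
                command: PipCommand::Compile(args),
            }) => {
                assert_eq!(args.src_file, vec![PathBuf::from("requirements.in")]);
                assert_eq!(args.output_file, Some(PathBuf::from("requirements.txt")));
                assert_eq!(args.extra.len(), 1);
            }
            _ => panic!("expected `pip compile`"),
        }
    }
}
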
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipSyncArgs {
    /// Include all packages listed in the given `requirements.txt` files.
    #[clap(required(true))]
    src_file: Vec<PathBuf>,

    /// Reinstall all packages, overwriting any entries in the cache and replacing any existing
    /// packages in the environment.
    #[clap(long)]
    reinstall: bool,

    /// Reinstall a specific package, overwriting any entries in the cache and replacing any
    /// existing versions in the environment.
    #[clap(long)]
    reinstall_package: Vec<PackageName>,

    /// The method to use when installing packages from the global cache.
    #[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())]
    link_mode: install_wheel_rs::linker::LinkMode,

    /// The URL of the Python Package Index.
    #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
    index_url: IndexUrl,

    /// Extra URLs of package indexes to use, in addition to `--index-url`.
    #[clap(long)]
    extra_index_url: Vec<IndexUrl>,

    /// Locations to search for candidate distributions, beyond those found in the indexes.
    ///
    /// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
    /// source distributions (`.tar.gz` or `.zip`) at the top level.
    ///
    /// If a URL, the page must contain a flat list of links to package files.
    #[clap(long)]
    find_links: Vec<FlatIndexLocation>,

    /// Ignore the registry index (e.g., PyPI), instead relying on local caches and `--find-links`
    /// directories and URLs.
    #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
    no_index: bool,

    /// Use legacy `setuptools` behavior when building source distributions without a
    /// `pyproject.toml`.
    #[clap(long)]
    legacy_setup_py: bool,

    /// Don't build source distributions.
    ///
    /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
    /// source distributions will be reused, but operations that require building distributions
    /// will exit with an error.
    #[clap(long)]
    no_build: bool,

    /// Validate the virtual environment after completing the installation, to detect packages
    /// with missing dependencies or other issues.
    #[clap(long)]
    strict: bool,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
struct PipInstallArgs {
    /// Install all listed packages.
    #[clap(group = "sources")]
    package: Vec<String>,

    /// Install all packages listed in the given requirements files.
    #[clap(short, long, group = "sources")]
    requirement: Vec<PathBuf>,

    /// Install the editable package based on the provided local file path.
    #[clap(short, long, group = "sources")]
    editable: Vec<String>,

    /// Constrain versions using the given requirements files.
    ///
    /// Constraints files are `requirements.txt`-like files that only control the _version_ of a
    /// requirement that's installed. However, including a package in a constraints file will _not_
    /// trigger the installation of that package.
    ///
    /// This is equivalent to pip's `--constraint` option.
    #[clap(short, long)]
    constraint: Vec<PathBuf>,

    /// Override versions using the given requirements files.
    ///
    /// Overrides files are `requirements.txt`-like files that force a specific version of a
    /// requirement to be installed, regardless of the requirements declared by any constituent
    /// package, and regardless of whether this would be considered an invalid resolution.
    ///
    /// While constraints are _additive_, in that they're combined with the requirements of the
    /// constituent packages, overrides are _absolute_, in that they completely replace the
    /// requirements of the constituent packages.
    #[clap(long)]
    r#override: Vec<PathBuf>,

    /// Include optional dependencies in the given extra group name; may be provided more than once.
    #[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    extra: Vec<ExtraName>,

    /// Include all optional dependencies.
    #[clap(long, conflicts_with = "extra")]
    all_extras: bool,

    /// Reinstall all packages, overwriting any entries in the cache and replacing any existing
    /// packages in the environment.
    #[clap(long)]
    reinstall: bool,

    /// Reinstall a specific package, overwriting any entries in the cache and replacing any
    /// existing versions in the environment.
    #[clap(long)]
    reinstall_package: Vec<PackageName>,

    /// The method to use when installing packages from the global cache.
    #[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())]
    link_mode: install_wheel_rs::linker::LinkMode,

    #[clap(long, value_enum, default_value_t = ResolutionMode::default())]
    resolution: ResolutionMode,

    #[clap(long, value_enum, default_value_t = PreReleaseMode::default())]
    prerelease: PreReleaseMode,

    /// Write the compiled requirements to the given `requirements.txt` file.
    #[clap(short, long)]
    output_file: Option<PathBuf>,

    /// The URL of the Python Package Index.
    #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
    index_url: IndexUrl,

    /// Extra URLs of package indexes to use, in addition to `--index-url`.
    #[clap(long)]
    extra_index_url: Vec<IndexUrl>,

    /// Locations to search for candidate distributions, beyond those found in the indexes.
    ///
    /// If a path, the target must be a directory that contains packages as wheel files (`.whl`) or
    /// source distributions (`.tar.gz` or `.zip`) at the top level.
    ///
    /// If a URL, the page must contain a flat list of links to package files.
    #[clap(long)]
    find_links: Vec<FlatIndexLocation>,

    /// Ignore the package index, instead relying on local archives and caches.
    #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
    no_index: bool,

    /// Use legacy `setuptools` behavior when building source distributions without a
    /// `pyproject.toml`.
    #[clap(long)]
    legacy_setup_py: bool,

    /// Don't build source distributions.
    ///
    /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
    /// source distributions will be reused, but operations that require building distributions
    /// will exit with an error.
    #[clap(long)]
    no_build: bool,

    /// Validate the virtual environment after completing the installation, to detect packages
    /// with missing dependencies or other issues.
    #[clap(long)]
    strict: bool,

    /// Try to resolve at a past point in time.
    ///
    /// This works by filtering out files with a more recent upload time, so if the index you use
    /// does not provide upload times, the results might be inaccurate. PyPI provides upload times
    /// for all files.
    ///
    /// Timestamps are given either as RFC 3339 timestamps, such as `2006-12-02T02:07:43Z`, or as
    /// UTC dates in the same format, such as `2006-12-02`. Dates are interpreted as including
    /// that entire day, i.e. until midnight UTC at the end of that day.
    #[arg(long, value_parser = date_or_datetime)]
    exclude_newer: Option<DateTime<Utc>>,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
struct PipUninstallArgs {
    /// Uninstall all listed packages.
    #[clap(group = "sources")]
    package: Vec<String>,

    /// Uninstall all packages listed in the given requirements files.
    #[clap(short, long, group = "sources")]
    requirement: Vec<PathBuf>,

    /// Uninstall the editable package based on the provided local file path.
    #[clap(short, long, group = "sources")]
    editable: Vec<String>,
}

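// A minimal sketch of the required "sources" argument group: `pip uninstall` needs at least one
// of a positional package, `--requirement`, or `--editable`. The package and file names below
// are illustrative, and "puffin" is just a placeholder for argv[0].
#[cfg(test)]
mod pip_uninstall_cli_tests {
    use super::*;

    #[test]
    fn uninstall_requires_at_least_one_source() {
        // With no sources, the required `sources` group rejects the invocation.
        assert!(Cli::try_parse_from(["puffin", "pip", "uninstall"]).is_err());
        // Any single member of the group is sufficient.
        assert!(Cli::try_parse_from(["puffin", "pip", "uninstall", "flask"]).is_ok());
        assert!(
            Cli::try_parse_from(["puffin", "pip", "uninstall", "--requirement", "requirements.txt"])
                .is_ok()
        );
    }
}
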
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipFreezeArgs {
    /// Validate the virtual environment, to detect packages with missing dependencies or other
    /// issues.
    #[clap(long)]
    strict: bool,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct CleanArgs {
    /// The packages to remove from the cache.
    package: Vec<PackageName>,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct VenvArgs {
    /// The Python interpreter to use for the virtual environment.
    // Short `-p` to match `virtualenv`
    // TODO(konstin): Support e.g. `-p 3.10`
    #[clap(short, long)]
    python: Option<PathBuf>,

    /// Install seed packages (`pip`, `setuptools`, and `wheel`) into the virtual environment.
    #[clap(long)]
    seed: bool,

    /// The path to the virtual environment to create.
    #[clap(default_value = ".venv")]
    name: PathBuf,

    /// The URL of the Python Package Index.
    #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
    index_url: IndexUrl,

    /// Extra URLs of package indexes to use, in addition to `--index-url`.
    #[clap(long)]
    extra_index_url: Vec<IndexUrl>,

    /// Ignore the package index, instead relying on local archives and caches.
    #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
    no_index: bool,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct AddArgs {
    /// The name of the package to add (e.g., `Django==4.2.6`).
    name: String,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct RemoveArgs {
    /// The name of the package to remove (e.g., `Django`).
    name: PackageName,
}

async fn inner() -> Result<ExitStatus> {
    let cli = Cli::parse();

    // Configure the `tracing` crate, which controls internal logging.
    #[cfg(feature = "tracing-durations-export")]
    let (duration_layer, _duration_guard) = logging::setup_duration();
    #[cfg(not(feature = "tracing-durations-export"))]
    let duration_layer = None::<tracing_subscriber::layer::Identity>;
    logging::setup_logging(
        if cli.verbose {
            logging::Level::Verbose
        } else {
            logging::Level::Default
        },
        duration_layer,
    );

    // Configure the `Printer`, which controls user-facing output in the CLI.
    let printer = if cli.quiet {
        printer::Printer::Quiet
    } else if cli.verbose {
        printer::Printer::Verbose
    } else {
        printer::Printer::Default
    };

    // Configure the `warn!` macros, which control user-facing warnings in the CLI.
    if !cli.quiet {
        puffin_warnings::enable();
    }

    let cache = Cache::try_from(cli.cache_args)?;

    match cli.command {
        Commands::Pip(PipArgs {
            command: PipCommand::Compile(args),
        }) => {
            let requirements = args
                .src_file
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let constraints = args
                .constraint
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let overrides = args
                .r#override
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let index_urls = IndexLocations::from_args(
                args.index_url,
                args.extra_index_url,
                args.find_links,
                args.no_index,
            );
            let extras = if args.all_extras {
                ExtrasSpecification::All
            } else if args.extra.is_empty() {
                ExtrasSpecification::None
            } else {
                ExtrasSpecification::Some(&args.extra)
            };
            commands::pip_compile(
                &requirements,
                &constraints,
                &overrides,
                extras,
                args.output_file.as_deref(),
                args.resolution,
                args.prerelease,
                args.upgrade.into(),
                args.generate_hashes,
                index_urls,
                if args.legacy_setup_py {
                    SetupPyStrategy::Setuptools
                } else {
                    SetupPyStrategy::Pep517
                },
                args.no_build,
                args.python_version,
                args.exclude_newer,
                cache,
                printer,
            )
            .await
        }
        Commands::Pip(PipArgs {
            command: PipCommand::Sync(args),
        }) => {
            let index_urls = IndexLocations::from_args(
                args.index_url,
                args.extra_index_url,
                args.find_links,
                args.no_index,
            );
            let sources = args
                .src_file
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package);
            commands::pip_sync(
                &sources,
                &reinstall,
                args.link_mode,
                index_urls,
                if args.legacy_setup_py {
                    SetupPyStrategy::Setuptools
                } else {
                    SetupPyStrategy::Pep517
                },
                args.no_build,
                args.strict,
                cache,
                printer,
            )
            .await
        }
        Commands::Pip(PipArgs {
            command: PipCommand::Install(args),
        }) => {
            let requirements = args
                .package
                .into_iter()
                .map(RequirementsSource::Package)
                .chain(args.editable.into_iter().map(RequirementsSource::Editable))
                .chain(args.requirement.into_iter().map(RequirementsSource::from))
                .collect::<Vec<_>>();
            let constraints = args
                .constraint
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let overrides = args
                .r#override
                .into_iter()
                .map(RequirementsSource::from)
                .collect::<Vec<_>>();
            let index_urls = IndexLocations::from_args(
                args.index_url,
                args.extra_index_url,
                args.find_links,
                args.no_index,
            );
            let extras = if args.all_extras {
                ExtrasSpecification::All
            } else if args.extra.is_empty() {
                ExtrasSpecification::None
            } else {
                ExtrasSpecification::Some(&args.extra)
            };
            let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package);
            commands::pip_install(
                &requirements,
                &constraints,
                &overrides,
                &extras,
                args.resolution,
                args.prerelease,
                index_urls,
                &reinstall,
                args.link_mode,
                if args.legacy_setup_py {
                    SetupPyStrategy::Setuptools
                } else {
                    SetupPyStrategy::Pep517
                },
                args.no_build,
                args.strict,
                args.exclude_newer,
                cache,
                printer,
            )
            .await
        }
        Commands::Pip(PipArgs {
            command: PipCommand::Uninstall(args),
        }) => {
            let sources = args
                .package
                .into_iter()
                .map(RequirementsSource::Package)
                .chain(args.editable.into_iter().map(RequirementsSource::Editable))
                .chain(args.requirement.into_iter().map(RequirementsSource::from))
                .collect::<Vec<_>>();
            commands::pip_uninstall(&sources, cache, printer).await
        }
        Commands::Pip(PipArgs {
            command: PipCommand::Freeze(args),
        }) => commands::freeze(&cache, args.strict, printer),
        Commands::Clean(args) => commands::clean(&cache, &args.package, printer),
        Commands::Venv(args) => {
            let index_locations = IndexLocations::from_args(
                args.index_url,
                args.extra_index_url,
                // No find links for the venv subcommand, to keep things simple
                Vec::new(),
                args.no_index,
            );
            commands::venv(
                &args.name,
                args.python.as_deref(),
                &index_locations,
                args.seed,
                &cache,
                printer,
            )
            .await
        }
        Commands::Add(args) => commands::add(&args.name, printer),
        Commands::Remove(args) => commands::remove(&args.name, printer),
    }
}

#[tokio::main]
async fn main() -> ExitCode {
    match inner().await {
        Ok(code) => code.into(),
        Err(err) => {
            #[allow(clippy::print_stderr)]
            {
                let mut causes = err.chain();
                eprintln!("{}: {}", "error".red().bold(), causes.next().unwrap());
                for err in causes {
                    eprintln!(" {}: {}", "Caused by".red().bold(), err);
                }
            }
            ExitStatus::Error.into()
        }
    }
}