mirror of https://github.com/astral-sh/uv
Add `uv publish`: Basic upload with username/password or keyring (#7475)
Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
Parent: 484717d42f · Commit: 1995d20298
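For orientation before the diff: the commit wires up a `uv publish` command whose flags and environment variables appear in the `PublishArgs` section below (`--publish-url`/`UV_PUBLISH_URL`, `--username`/`UV_PUBLISH_USERNAME`, `--password`/`UV_PUBLISH_PASSWORD`, `--token`/`UV_PUBLISH_TOKEN`, `--keyring-provider`). A rough usage sketch based on those flags; the concrete URLs and credential values are illustrative, not taken from this commit:

```console
# Upload everything matching dist/* (the default glob) with username/password.
$ uv publish --publish-url https://test.pypi.org/legacy/ --username alice --password '...'

# Equivalent token-based upload; a token stands in for `__token__` plus the token as password.
$ UV_PUBLISH_TOKEN="pypi-..." uv publish
```
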
@@ -968,6 +968,61 @@ jobs:
        env:
          UV_PROJECT_ENVIRONMENT: "/home/runner/example"

  integration-test-publish:
    timeout-minutes: 10
    needs: build-binary-linux
    name: "integration test | uv publish"
    runs-on: ubuntu-latest
    # Only the main repository is a trusted publisher
    if: github.repository == 'astral-sh/uv'
    environment: uv-test-publish
    env:
      # No dbus in GitHub Actions
      PYTHON_KEYRING_BACKEND: keyrings.alt.file.PlaintextKeyring
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Only run the publish test when the publish code has changed.
      - uses: tj-actions/changed-files@v45
        id: changed
        with:
          files_yaml: |
            code:
              - "crates/uv-publish/**/*"
              - "scripts/publish/**/*"

      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: "Download binary"
        uses: actions/download-artifact@v4
        with:
          name: uv-linux-${{ github.sha }}

      - name: "Prepare binary"
        run: chmod +x ./uv

      - name: "Add password to keyring"
        run: |
          # `keyrings.alt` contains the plaintext keyring
          ./uv tool install --with keyrings.alt "keyring<25.4.0" # TODO(konsti): Remove upper bound once fix is released
          echo $UV_TEST_PUBLISH_KEYRING | keyring set https://test.pypi.org/legacy/?astral-test-keyring __token__
        env:
          UV_TEST_PUBLISH_KEYRING: ${{ secrets.UV_TEST_PUBLISH_KEYRING }}

      - name: "Publish test packages"
        if: ${{ steps.changed.outputs.code_any_changed }}
        # `-p 3.12` prefers the python we just installed over the one locked in `.python-version`.
        run: ./uv run -p 3.12 scripts/publish/test_publish.py --uv ./uv all
        env:
          RUST_LOG: uv=debug,uv_publish=trace
          UV_TEST_PUBLISH_TOKEN: ${{ secrets.UV_TEST_PUBLISH_TOKEN }}
          UV_TEST_PUBLISH_PASSWORD: ${{ secrets.UV_TEST_PUBLISH_PASSWORD }}
          UV_TEST_PUBLISH_GITLAB_PAT: ${{ secrets.UV_TEST_PUBLISH_GITLAB_PAT }}

  cache-test-ubuntu:
    timeout-minutes: 10
    needs: build-binary-linux
@@ -2131,6 +2131,16 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"

[[package]]
name = "mime_guess"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
dependencies = [
 "mime",
 "unicase",
]

[[package]]
name = "miniz_oxide"
version = "0.7.4"

@@ -3090,6 +3100,7 @@ dependencies = [
 "js-sys",
 "log",
 "mime",
 "mime_guess",
 "once_cell",
 "percent-encoding",
 "pin-project-lite",

@@ -4290,6 +4301,15 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"

[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
 "version_check",
]

[[package]]
name = "unicode-bidi"
version = "0.3.15"

@@ -4511,6 +4531,7 @@ dependencies = [
 "uv-git",
 "uv-installer",
 "uv-normalize",
 "uv-publish",
 "uv-python",
 "uv-requirements",
 "uv-resolver",

@@ -4983,6 +5004,35 @@ dependencies = [
 "thiserror",
]

[[package]]
name = "uv-publish"
version = "0.1.0"
dependencies = [
 "async-compression",
 "base64 0.22.1",
 "distribution-filename",
 "fs-err",
 "futures",
 "glob",
 "insta",
 "itertools 0.13.0",
 "krata-tokio-tar",
 "pypi-types",
 "reqwest",
 "reqwest-middleware",
 "rustc-hash",
 "serde",
 "serde_json",
 "sha2",
 "thiserror",
 "tokio",
 "tracing",
 "url",
 "uv-client",
 "uv-fs",
 "uv-metadata",
]

[[package]]
name = "uv-python"
version = "0.0.1"

@@ -5156,6 +5206,7 @@ dependencies = [
 "thiserror",
 "toml",
 "tracing",
 "url",
 "uv-cache-info",
 "uv-configuration",
 "uv-fs",
@@ -45,6 +45,7 @@ uv-metadata = { path = "crates/uv-metadata" }
uv-normalize = { path = "crates/uv-normalize" }
uv-options-metadata = { path = "crates/uv-options-metadata" }
uv-pubgrub = { path = "crates/uv-pubgrub" }
uv-publish = { path = "crates/uv-publish" }
uv-python = { path = "crates/uv-python" }
uv-requirements = { path = "crates/uv-requirements" }
uv-resolver = { path = "crates/uv-resolver" }

@@ -123,8 +124,8 @@ quote = { version = "1.0.37" }
rayon = { version = "1.10.0" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
-reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks"] }
-reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "5e3eaf254b5bd481c75d2710eed055f95b756913" }
+reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart"] }
+reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "5e3eaf254b5bd481c75d2710eed055f95b756913", features = ["multipart"] }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "5e3eaf254b5bd481c75d2710eed055f95b756913" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
@@ -67,6 +67,14 @@ impl DistFilename {
            Self::WheelFilename(filename) => &filename.version,
        }
    }

    /// Whether the file is a `bdist_wheel` or an `sdist`.
    pub fn filetype(&self) -> &'static str {
        match self {
            Self::SourceDistFilename(_) => "sdist",
            Self::WheelFilename(_) => "bdist_wheel",
        }
    }
}

impl Display for DistFilename {
@@ -0,0 +1,199 @@
//! Vendored from <https://github.com/PyO3/python-pkginfo-rs>

use crate::metadata::Headers;
use crate::MetadataError;
use std::str;
use std::str::FromStr;

/// Core Metadata 2.3 as specified in
/// <https://packaging.python.org/specifications/core-metadata/>.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct Metadata23 {
    /// Version of the file format; legal values are `1.0`, `1.1`, `1.2`, `2.1`, `2.2` and `2.3`.
    pub metadata_version: String,
    /// The name of the distribution.
    pub name: String,
    /// A string containing the distribution’s version number.
    pub version: String,
    /// A Platform specification describing an operating system supported by the distribution
    /// which is not listed in the “Operating System” Trove classifiers.
    pub platforms: Vec<String>,
    /// Binary distributions containing a PKG-INFO file will use the Supported-Platform field
    /// in their metadata to specify the OS and CPU for which the binary distribution was compiled.
    pub supported_platforms: Vec<String>,
    /// A one-line summary of what the distribution does.
    pub summary: Option<String>,
    /// A longer description of the distribution that can run to several paragraphs.
    pub description: Option<String>,
    /// A list of additional keywords, separated by commas, to be used to
    /// assist searching for the distribution in a larger catalog.
    pub keywords: Option<String>,
    /// A string containing the URL for the distribution’s home page.
    pub home_page: Option<String>,
    /// A string containing the URL from which this version of the distribution can be downloaded.
    pub download_url: Option<String>,
    /// A string containing the author’s name at a minimum; additional contact information may be provided.
    pub author: Option<String>,
    /// A string containing the author’s e-mail address. It can contain a name and e-mail address in the legal forms for a RFC-822 `From:` header.
    pub author_email: Option<String>,
    /// Text indicating the license covering the distribution where the license is not a selection from the `License` Trove classifiers or an SPDX license expression.
    pub license: Option<String>,
    /// An SPDX expression indicating the license covering the distribution.
    pub license_expression: Option<String>,
    /// Paths to files containing the text of the licenses covering the distribution.
    pub license_files: Vec<String>,
    /// Each entry is a string giving a single classification value for the distribution.
    pub classifiers: Vec<String>,
    /// Each entry contains a string naming some other distutils project required by this distribution.
    pub requires_dist: Vec<String>,
    /// Each entry contains a string naming a Distutils project which is contained within this distribution.
    pub provides_dist: Vec<String>,
    /// Each entry contains a string describing a distutils project’s distribution which this distribution renders obsolete,
    /// meaning that the two projects should not be installed at the same time.
    pub obsoletes_dist: Vec<String>,
    /// A string containing the maintainer’s name at a minimum; additional contact information may be provided.
    ///
    /// Note that this field is intended for use when a project is being maintained by someone other than the original author:
    /// it should be omitted if it is identical to `author`.
    pub maintainer: Option<String>,
    /// A string containing the maintainer’s e-mail address.
    /// It can contain a name and e-mail address in the legal forms for a RFC-822 `From:` header.
    ///
    /// Note that this field is intended for use when a project is being maintained by someone other than the original author:
    /// it should be omitted if it is identical to `author_email`.
    pub maintainer_email: Option<String>,
    /// This field specifies the Python version(s) that the distribution is guaranteed to be compatible with.
    pub requires_python: Option<String>,
    /// Each entry contains a string describing some dependency in the system that the distribution is to be used with.
    pub requires_external: Vec<String>,
    /// A string containing a browsable URL for the project and a label for it, separated by a comma.
    pub project_urls: Vec<String>,
    /// A string containing the name of an optional feature. Must be a valid Python identifier.
    /// May be used to make a dependency conditional on whether the optional feature has been requested.
    pub provides_extras: Vec<String>,
    /// A string stating the markup syntax (if any) used in the distribution’s description,
    /// so that tools can intelligently render the description.
    pub description_content_type: Option<String>,
    /// A string containing the name of another core metadata field.
    pub dynamic: Vec<String>,
}

impl Metadata23 {
    /// Parse distribution metadata from the content of a metadata file.
    pub fn parse(content: &[u8]) -> Result<Self, MetadataError> {
        let headers = Headers::parse(content)?;

        let metadata_version = headers
            .get_first_value("Metadata-Version")
            .ok_or(MetadataError::FieldNotFound("Metadata-Version"))?;
        let name = headers
            .get_first_value("Name")
            .ok_or(MetadataError::FieldNotFound("Name"))?;
        let version = headers
            .get_first_value("Version")
            .ok_or(MetadataError::FieldNotFound("Version"))?;
        let platforms = headers.get_all_values("Platform").collect();
        let supported_platforms = headers.get_all_values("Supported-Platform").collect();
        let summary = headers.get_first_value("Summary");
        let body = str::from_utf8(&content[headers.body_start..])
            .map_err(MetadataError::DescriptionEncoding)?;
        let description = if body.trim().is_empty() {
            headers.get_first_value("Description")
        } else {
            Some(body.to_string())
        };
        let keywords = headers.get_first_value("Keywords");
        let home_page = headers.get_first_value("Home-Page");
        let download_url = headers.get_first_value("Download-URL");
        let author = headers.get_first_value("Author");
        let author_email = headers.get_first_value("Author-email");
        let license = headers.get_first_value("License");
        let license_expression = headers.get_first_value("License-Expression");
        let license_files = headers.get_all_values("License-File").collect();
        let classifiers = headers.get_all_values("Classifier").collect();
        let requires_dist = headers.get_all_values("Requires-Dist").collect();
        let provides_dist = headers.get_all_values("Provides-Dist").collect();
        let obsoletes_dist = headers.get_all_values("Obsoletes-Dist").collect();
        let maintainer = headers.get_first_value("Maintainer");
        let maintainer_email = headers.get_first_value("Maintainer-email");
        let requires_python = headers.get_first_value("Requires-Python");
        let requires_external = headers.get_all_values("Requires-External").collect();
        let project_urls = headers.get_all_values("Project-URL").collect();
        let provides_extras = headers.get_all_values("Provides-Extra").collect();
        let description_content_type = headers.get_first_value("Description-Content-Type");
        let dynamic = headers.get_all_values("Dynamic").collect();
        Ok(Metadata23 {
            metadata_version,
            name,
            version,
            platforms,
            supported_platforms,
            summary,
            description,
            keywords,
            home_page,
            download_url,
            author,
            author_email,
            license,
            license_expression,
            license_files,
            classifiers,
            requires_dist,
            provides_dist,
            obsoletes_dist,
            maintainer,
            maintainer_email,
            requires_python,
            requires_external,
            project_urls,
            provides_extras,
            description_content_type,
            dynamic,
        })
    }
}

impl FromStr for Metadata23 {
    type Err = MetadataError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Metadata23::parse(s.as_bytes())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::MetadataError;

    #[test]
    fn test_parse_from_str() {
        let s = "Metadata-Version: 1.0";
        let meta: Result<Metadata23, MetadataError> = s.parse();
        assert!(matches!(meta, Err(MetadataError::FieldNotFound("Name"))));

        let s = "Metadata-Version: 1.0\nName: asdf";
        let meta = Metadata23::parse(s.as_bytes());
        assert!(matches!(meta, Err(MetadataError::FieldNotFound("Version"))));

        let s = "Metadata-Version: 1.0\nName: asdf\nVersion: 1.0";
        let meta = Metadata23::parse(s.as_bytes()).unwrap();
        assert_eq!(meta.metadata_version, "1.0");
        assert_eq!(meta.name, "asdf");
        assert_eq!(meta.version, "1.0");

        let s = "Metadata-Version: 1.0\nName: asdf\nVersion: 1.0\nDescription: a Python package";
        let meta: Metadata23 = s.parse().unwrap();
        assert_eq!(meta.description.as_deref(), Some("a Python package"));

        let s = "Metadata-Version: 1.0\nName: asdf\nVersion: 1.0\n\na Python package";
        let meta: Metadata23 = s.parse().unwrap();
        assert_eq!(meta.description.as_deref(), Some("a Python package"));

        let s = "Metadata-Version: 1.0\nName: asdf\nVersion: 1.0\nAuthor: 中文\n\n一个 Python 包";
        let meta: Metadata23 = s.parse().unwrap();
        assert_eq!(meta.author.as_deref(), Some("中文"));
        assert_eq!(meta.description.as_deref(), Some("一个 Python 包"));
    }
}
@@ -1,5 +1,6 @@
mod metadata10;
mod metadata12;
mod metadata23;
mod metadata_resolver;
mod pyproject_toml;
mod requires_txt;

@@ -14,6 +15,7 @@ use uv_normalize::InvalidNameError;

pub use metadata10::Metadata10;
pub use metadata12::Metadata12;
pub use metadata23::Metadata23;
pub use metadata_resolver::MetadataResolver;
pub use pyproject_toml::RequiresDist;
pub use requires_txt::RequiresTxt;

@@ -65,13 +67,17 @@ impl From<Pep508Error<VerbatimParsedUrl>> for MetadataError {
#[derive(Debug)]
struct Headers<'a> {
    headers: Vec<mailparse::MailHeader<'a>>,
    body_start: usize,
}

impl<'a> Headers<'a> {
    /// Parse the headers from the given metadata file content.
    fn parse(content: &'a [u8]) -> Result<Self, MailParseError> {
-        let (headers, _) = mailparse::parse_headers(content)?;
-        Ok(Self { headers })
+        let (headers, body_start) = mailparse::parse_headers(content)?;
+        Ok(Self {
+            headers,
+            body_start,
+        })
    }

    /// Return the first value associated with the header with the given name.
@@ -10,6 +10,7 @@ use clap::{Args, Parser, Subcommand};
use distribution_types::{FlatIndexLocation, IndexUrl};
use pep508_rs::Requirement;
use pypi_types::VerbatimParsedUrl;
use url::Url;
use uv_cache::CacheArgs;
use uv_configuration::{
    ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier,

@@ -380,6 +381,8 @@ pub enum Commands {
        after_long_help = ""
    )]
    Build(BuildArgs),
    /// Upload distributions to an index.
    Publish(PublishArgs),
    /// Manage uv's cache.
    #[command(
        after_help = "Use `uv help cache` for more details.",

@@ -4303,3 +4306,72 @@ pub struct DisplayTreeArgs {
    #[arg(long, alias = "reverse")]
    pub invert: bool,
}

#[derive(Args, Debug)]
pub struct PublishArgs {
    /// Paths to the files to upload. Accepts glob expressions.
    ///
    /// Defaults to the `dist` directory. Selects only wheels and source distributions, while
    /// ignoring other files.
    #[arg(default_value = "dist/*")]
    pub files: Vec<String>,

    /// The URL of the upload endpoint.
    ///
    /// Note that this typically differs from the index URL.
    ///
    /// Defaults to PyPI's publish URL (<https://upload.pypi.org/legacy/>).
    #[arg(long, env = "UV_PUBLISH_URL")]
    pub publish_url: Option<Url>,

    /// The username for the upload.
    #[arg(short, long, env = "UV_PUBLISH_USERNAME")]
    pub username: Option<String>,

    /// The password for the upload.
    #[arg(short, long, env = "UV_PUBLISH_PASSWORD")]
    pub password: Option<String>,

    /// The token for the upload.
    ///
    /// Using a token is equivalent to passing `__token__` as `--username` and the token as
    /// `--password`.
    #[arg(
        short,
        long,
        env = "UV_PUBLISH_TOKEN",
        conflicts_with = "username",
        conflicts_with = "password"
    )]
    pub token: Option<String>,

    /// Attempt to use `keyring` for authentication with the upload endpoint.
    ///
    /// At present, only `--keyring-provider subprocess` is supported, which configures uv to
    /// use the `keyring` CLI to handle authentication.
    ///
    /// Defaults to `disabled`.
    #[arg(long, value_enum, env = "UV_KEYRING_PROVIDER")]
    pub keyring_provider: Option<KeyringProviderType>,

    /// Allow insecure connections to a host.
    ///
    /// Can be provided multiple times.
    ///
    /// Expects to receive either a hostname (e.g., `localhost`), a host-port pair (e.g.,
    /// `localhost:8080`), or a URL (e.g., `https://localhost`).
    ///
    /// WARNING: Hosts included in this list will not be verified against the system's certificate
    /// store. Only use `--allow-insecure-host` in a secure network with verified sources, as it
    /// bypasses SSL verification and could expose you to MITM attacks.
    #[arg(
        long,
        alias = "trusted-host",
        env = "UV_INSECURE_HOST",
        value_delimiter = ' ',
        value_parser = parse_insecure_host,
    )]
    pub allow_insecure_host: Option<Vec<Maybe<TrustedHost>>>,
}
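The keyring path mirrors what the CI job above does with the plaintext test keyring: store the credential under the upload URL, then let uv resolve it through the `keyring` CLI at publish time. A hedged sketch only; the endpoint and token are placeholders, and the exact lookup behavior follows the auth-middleware comment in `build_request` below:

```console
# Store a token for the upload endpoint in the system keyring.
$ keyring set https://test.pypi.org/legacy/ __token__

# Publish, letting the `keyring` CLI supply the password; only `subprocess` is supported.
$ uv publish --keyring-provider subprocess --username __token__ \
    --publish-url https://test.pypi.org/legacy/
```
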
@@ -0,0 +1,41 @@
[package]
name = "uv-publish"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[dependencies]
distribution-filename = { workspace = true }
pypi-types = { workspace = true }
uv-client = { workspace = true }
uv-fs = { workspace = true }
uv-metadata = { workspace = true }

async-compression = { workspace = true }
base64 = { workspace = true }
fs-err = { workspace = true }
futures = { workspace = true }
glob = { workspace = true }
itertools = { workspace = true }
krata-tokio-tar = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha2 = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }

[dev-dependencies]
insta = { version = "1.36.1", features = ["json", "filters"] }

[lints]
workspace = true
@@ -0,0 +1,784 @@
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename};
use fs_err::File;
use futures::TryStreamExt;
use glob::{glob, GlobError, PatternError};
use itertools::Itertools;
use pypi_types::{Metadata23, MetadataError};
use reqwest::header::AUTHORIZATION;
use reqwest::multipart::Part;
use reqwest::{Body, Response, StatusCode};
use reqwest_middleware::RequestBuilder;
use rustc_hash::FxHashSet;
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::io::BufReader;
use std::path::{Path, PathBuf};
use std::{fmt, io};
use thiserror::Error;
use tokio::io::AsyncReadExt;
use tracing::{debug, enabled, trace, Level};
use url::Url;
use uv_client::BaseClient;
use uv_fs::Simplified;
use uv_metadata::read_metadata_async_seek;

#[derive(Error, Debug)]
pub enum PublishError {
    #[error("Invalid publish path: `{0}`")]
    Pattern(String, #[source] PatternError),
    /// [`GlobError`] is a wrapped io error.
    #[error(transparent)]
    Glob(#[from] GlobError),
    #[error("Path patterns didn't match any wheels or source distributions")]
    NoFiles,
    #[error(transparent)]
    Fmt(#[from] fmt::Error),
    #[error("File is neither a wheel nor a source distribution: `{}`", _0.user_display())]
    InvalidFilename(PathBuf),
    #[error("Failed to publish: `{}`", _0.user_display())]
    PublishPrepare(PathBuf, #[source] Box<PublishPrepareError>),
    #[error("Failed to publish `{}` to {}", _0.user_display(), _1)]
    PublishSend(PathBuf, Url, #[source] PublishSendError),
}

/// Failure to get the metadata for a specific file.
#[derive(Error, Debug)]
pub enum PublishPrepareError {
    #[error(transparent)]
    Io(#[from] io::Error),
    #[error("Failed to read metadata")]
    Metadata(#[from] uv_metadata::Error),
    #[error("Failed to read metadata")]
    Metadata23(#[from] MetadataError),
    #[error("Only files ending in `.tar.gz` are valid source distributions: `{0}`")]
    InvalidExtension(SourceDistFilename),
    #[error("No PKG-INFO file found")]
    MissingPkgInfo,
    #[error("Multiple PKG-INFO files found: `{0}`")]
    MultiplePkgInfo(String),
    #[error("Failed to read: `{0}`")]
    Read(String, #[source] io::Error),
}

/// Failure in or after (HTTP) transport for a specific file.
#[derive(Error, Debug)]
pub enum PublishSendError {
    #[error("Failed to send POST request")]
    ReqwestMiddleware(#[from] reqwest_middleware::Error),
    #[error("Upload failed with status {0}")]
    StatusNoBody(StatusCode, #[source] reqwest::Error),
    #[error("Upload failed with status code {0}: {1}")]
    Status(StatusCode, String),
    /// The registry returned a "403 Forbidden".
    #[error("Permission denied (status code {0}): {1}")]
    PermissionDenied(StatusCode, String),
    /// See inline comment.
    #[error("The request was redirected, but redirects are not allowed when publishing, please use the canonical URL: `{0}`")]
    RedirectError(Url),
}

impl PublishSendError {
    /// Extract `code` from the PyPI json error response, if any.
    ///
    /// The error response from PyPI contains crucial context, such as the difference between
    /// "Invalid or non-existent authentication information" and "The user 'konstin' isn't allowed
    /// to upload to project 'dummy'".
    ///
    /// Twine uses the HTTP status reason for its error messages. From HTTP/2 onward this field
    /// is abolished, so reqwest doesn't expose it, see
    /// <https://docs.rs/reqwest/0.12.7/reqwest/struct.StatusCode.html#method.canonical_reason>.
    /// PyPI does respect the content type for error responses and can return an error display as
    /// HTML, JSON and plain. Since HTML and plain text are both overly verbose, we show the JSON
    /// response. Examples are shown below, line breaks were inserted for readability. Of those,
    /// the `code` seems to be the most helpful message, so we return it. If the response isn't a
    /// JSON document with `code` we return the regular body.
    ///
    /// ```json
    /// {"message": "The server could not comply with the request since it is either malformed or
    /// otherwise incorrect.\n\n\nError: Use 'source' as Python version for an sdist.\n\n",
    /// "code": "400 Error: Use 'source' as Python version for an sdist.",
    /// "title": "Bad Request"}
    /// ```
    ///
    /// ```json
    /// {"message": "Access was denied to this resource.\n\n\nInvalid or non-existent authentication
    /// information. See https://test.pypi.org/help/#invalid-auth for more information.\n\n",
    /// "code": "403 Invalid or non-existent authentication information. See
    /// https://test.pypi.org/help/#invalid-auth for more information.",
    /// "title": "Forbidden"}
    /// ```
    /// ```json
    /// {"message": "Access was denied to this resource.\n\n\n\n\n",
    /// "code": "403 Username/Password authentication is no longer supported. Migrate to API
    /// Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and
    /// https://test.pypi.org/help/#trusted-publishers",
    /// "title": "Forbidden"}
    /// ```
    ///
    /// For context, for the last case twine shows:
    /// ```text
    /// WARNING Error during upload. Retry with the --verbose option for more details.
    /// ERROR HTTPError: 403 Forbidden from https://test.pypi.org/legacy/
    /// Username/Password authentication is no longer supported. Migrate to API
    /// Tokens or Trusted Publishers instead. See
    /// https://test.pypi.org/help/#apitoken and
    /// https://test.pypi.org/help/#trusted-publishers
    /// ```
    ///
    /// ```text
    /// INFO Response from https://test.pypi.org/legacy/:
    /// 403 Username/Password authentication is no longer supported. Migrate to
    /// API Tokens or Trusted Publishers instead. See
    /// https://test.pypi.org/help/#apitoken and
    /// https://test.pypi.org/help/#trusted-publishers
    /// INFO <html>
    /// <head>
    /// <title>403 Username/Password authentication is no longer supported.
    /// Migrate to API Tokens or Trusted Publishers instead. See
    /// https://test.pypi.org/help/#apitoken and
    /// https://test.pypi.org/help/#trusted-publishers</title>
    /// </head>
    /// <body>
    /// <h1>403 Username/Password authentication is no longer supported.
    /// Migrate to API Tokens or Trusted Publishers instead. See
    /// https://test.pypi.org/help/#apitoken and
    /// https://test.pypi.org/help/#trusted-publishers</h1>
    /// Access was denied to this resource.<br/><br/>
    /// ```
    ///
    /// In comparison, we now show (line-wrapped for readability):
    ///
    /// ```text
    /// error: Failed to publish `dist/astral_test_1-0.1.0-py3-none-any.whl` to `https://test.pypi.org/legacy/`
    /// Caused by: Incorrect credentials (status code 403 Forbidden): 403 Username/Password
    /// authentication is no longer supported. Migrate to API Tokens or Trusted Publishers
    /// instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
    /// ```
    fn extract_error_message(body: String, content_type: Option<&str>) -> String {
        if content_type == Some("application/json") {
            #[derive(Deserialize)]
            struct ErrorBody {
                code: String,
            }

            if let Ok(structured) = serde_json::from_str::<ErrorBody>(&body) {
                structured.code
            } else {
                body
            }
        } else {
            body
        }
    }
}

pub fn files_for_publishing(
    paths: Vec<String>,
) -> Result<Vec<(PathBuf, DistFilename)>, PublishError> {
    let mut seen = FxHashSet::default();
    let mut files = Vec::new();
    for path in paths {
        for dist in glob(&path).map_err(|err| PublishError::Pattern(path, err))? {
            let dist = dist?;
            if !dist.is_file() {
                continue;
            }
            if !seen.insert(dist.clone()) {
                continue;
            }
            let Some(filename) = dist.file_name().and_then(|filename| filename.to_str()) else {
                continue;
            };
            let filename = DistFilename::try_from_normalized_filename(filename)
                .ok_or_else(|| PublishError::InvalidFilename(dist.clone()))?;
            files.push((dist, filename));
        }
    }
    // TODO(konsti): Should we sort those files, e.g. wheels before sdists because they are more
    // certain to have reliable metadata, even though the metadata in the upload API is unreliable
    // in general?
    Ok(files)
}

/// Upload a file to a registry.
///
/// Returns `true` if the file was newly uploaded and `false` if it already existed.
pub async fn upload(
    file: &Path,
    filename: &DistFilename,
    registry: &Url,
    client: &BaseClient,
    username: Option<&str>,
    password: Option<&str>,
) -> Result<bool, PublishError> {
    let form_metadata = form_metadata(file, filename)
        .await
        .map_err(|err| PublishError::PublishPrepare(file.to_path_buf(), Box::new(err)))?;
    let request = build_request(
        file,
        filename,
        registry,
        client,
        username,
        password,
        form_metadata,
    )
    .await
    .map_err(|err| PublishError::PublishPrepare(file.to_path_buf(), Box::new(err)))?;

    let response = request.send().await.map_err(|err| {
        PublishError::PublishSend(file.to_path_buf(), registry.clone(), err.into())
    })?;

    handle_response(registry, response)
        .await
        .map_err(|err| PublishError::PublishSend(file.to_path_buf(), registry.clone(), err))
}

/// Calculate the SHA256 of a file.
fn hash_file(path: impl AsRef<Path>) -> Result<String, io::Error> {
    // Ideally, this would be async, but in case we actually want to make parallel uploads we should
    // use `spawn_blocking` since sha256 is cpu intensive.
    let mut file = BufReader::new(File::open(path.as_ref())?);
    let mut hasher = Sha256::new();
    io::copy(&mut file, &mut hasher)?;
    Ok(format!("{:x}", hasher.finalize()))
}

// Not in `uv-metadata` because we only support tar files here.
async fn source_dist_pkg_info(file: &Path) -> Result<Vec<u8>, PublishPrepareError> {
    let file = fs_err::tokio::File::open(&file).await?;
    let reader = tokio::io::BufReader::new(file);
    let decoded = async_compression::tokio::bufread::GzipDecoder::new(reader);
    let mut archive = tokio_tar::Archive::new(decoded);
    let mut pkg_infos: Vec<(PathBuf, Vec<u8>)> = archive
        .entries()?
        .map_err(PublishPrepareError::from)
        .try_filter_map(|mut entry| async move {
            let path = entry
                .path()
                .map_err(PublishPrepareError::from)?
                .to_path_buf();
            let mut components = path.components();
            let Some(_top_level) = components.next() else {
                return Ok(None);
            };
            let Some(pkg_info) = components.next() else {
                return Ok(None);
            };
            if components.next().is_some() || pkg_info.as_os_str() != "PKG-INFO" {
                return Ok(None);
            }
            let mut buffer = Vec::new();
            // We have to read while iterating or the entry is empty as we're beyond it in the file.
            entry.read_to_end(&mut buffer).await.map_err(|err| {
                PublishPrepareError::Read(path.to_string_lossy().to_string(), err)
            })?;
            Ok(Some((path, buffer)))
        })
        .try_collect()
        .await?;
    match pkg_infos.len() {
        0 => Err(PublishPrepareError::MissingPkgInfo),
        1 => Ok(pkg_infos.remove(0).1),
        _ => Err(PublishPrepareError::MultiplePkgInfo(
            pkg_infos
                .iter()
                .map(|(path, _buffer)| path.to_string_lossy())
                .join(", "),
        )),
    }
}

async fn metadata(file: &Path, filename: &DistFilename) -> Result<Metadata23, PublishPrepareError> {
    let contents = match filename {
        DistFilename::SourceDistFilename(source_dist) => {
            if source_dist.extension != SourceDistExtension::TarGz {
                // See PEP 625. While we support installing legacy source distributions, we don't
                // support creating and uploading them.
                return Err(PublishPrepareError::InvalidExtension(source_dist.clone()));
            }
            source_dist_pkg_info(file).await?
        }
        DistFilename::WheelFilename(wheel) => {
            let file = fs_err::tokio::File::open(&file).await?;
            let reader = tokio::io::BufReader::new(file);
            read_metadata_async_seek(wheel, reader).await?
        }
    };
    Ok(Metadata23::parse(&contents)?)
}

/// Collect the non-file fields for the multipart request from the package METADATA.
///
/// Reference implementation: <https://github.com/pypi/warehouse/blob/d2c36d992cf9168e0518201d998b2707a3ef1e72/warehouse/forklift/legacy.py#L1376-L1430>
async fn form_metadata(
    file: &Path,
    filename: &DistFilename,
) -> Result<Vec<(&'static str, String)>, PublishPrepareError> {
    let hash_hex = hash_file(file)?;

    let metadata = metadata(file, filename).await?;

    let mut form_metadata = vec![
        (":action", "file_upload".to_string()),
        ("sha256_digest", hash_hex),
        ("protocol_version", "1".to_string()),
        ("metadata_version", metadata.metadata_version.clone()),
        // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)`
        // * <https://github.com/pypa/twine/issues/743>
        // * <https://github.com/pypa/twine/blob/5bf3f38ff3d8b2de47b7baa7b652c697d7a64776/twine/package.py#L57-L65>
        // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate
        // `normalized_name`, so we'll start with this and we'll readjust if there are user reports.
        ("name", metadata.name.clone()),
        ("version", metadata.version.clone()),
        ("filetype", filename.filetype().to_string()),
    ];

    if let DistFilename::WheelFilename(wheel) = filename {
        form_metadata.push(("pyversion", wheel.python_tag.join(".")));
    } else {
        form_metadata.push(("pyversion", "source".to_string()));
    }

    let mut add_option = |name, value: Option<String>| {
        if let Some(some) = value.clone() {
            form_metadata.push((name, some));
        }
    };

    add_option("summary", metadata.summary);
    add_option("description", metadata.description);
    add_option(
        "description_content_type",
        metadata.description_content_type,
    );
    add_option("author", metadata.author);
    add_option("author_email", metadata.author_email);
    add_option("maintainer", metadata.maintainer);
    add_option("maintainer_email", metadata.maintainer_email);
    add_option("license", metadata.license);
    add_option("keywords", metadata.keywords);
    add_option("home_page", metadata.home_page);
    add_option("download_url", metadata.download_url);

    // The GitLab PyPI repository API implementation requires this metadata field and twine always
    // includes it in the request, even when it's empty.
    form_metadata.push((
        "requires_python",
        metadata.requires_python.unwrap_or(String::new()),
    ));

    let mut add_vec = |name, values: Vec<String>| {
        for i in values {
            form_metadata.push((name, i.clone()));
        }
    };

    add_vec("classifiers", metadata.classifiers);
    add_vec("platform", metadata.platforms);
    add_vec("requires_dist", metadata.requires_dist);
    add_vec("provides_dist", metadata.provides_dist);
    add_vec("obsoletes_dist", metadata.obsoletes_dist);
    add_vec("requires_external", metadata.requires_external);
    add_vec("project_urls", metadata.project_urls);

    Ok(form_metadata)
}

async fn build_request(
    file: &Path,
    filename: &DistFilename,
    registry: &Url,
    client: &BaseClient,
    username: Option<&str>,
    password: Option<&str>,
    form_metadata: Vec<(&'static str, String)>,
) -> Result<RequestBuilder, PublishPrepareError> {
    let mut form = reqwest::multipart::Form::new();
    for (key, value) in form_metadata {
        form = form.text(key, value);
    }

    let file: tokio::fs::File = fs_err::tokio::File::open(file).await?.into();
    let file_reader = Body::from(file);
    form = form.part(
        "content",
        Part::stream(file_reader).file_name(filename.to_string()),
    );

    let url = if let Some(username) = username {
        if password.is_none() {
            // Attach the username to the URL so the authentication middleware can find the matching
            // password.
            let mut url = registry.clone();
            let _ = url.set_username(username);
            url
        } else {
            // We set the authorization header below.
            registry.clone()
        }
    } else {
        registry.clone()
    };

    let mut request = client
        .client()
        .post(url)
        .multipart(form)
        // Ask PyPI for structured error messages instead of HTML-markup error messages.
        // For other registries, we ask them to return plain text over HTML. See
        // [`PublishSendError::extract_error_message`].
        .header(
            reqwest::header::ACCEPT,
            "application/json;q=0.9, text/plain;q=0.8, text/html;q=0.7",
        );
    if let (Some(username), Some(password)) = (username, password) {
        debug!("Using username/password basic auth");
        let credentials = BASE64_STANDARD.encode(format!("{username}:{password}"));
        request = request.header(AUTHORIZATION, format!("Basic {credentials}"));
    }
    Ok(request)
}

/// Returns `true` if the file was newly uploaded and `false` if it already existed.
async fn handle_response(registry: &Url, response: Response) -> Result<bool, PublishSendError> {
    let status_code = response.status();
    debug!("Response code for {registry}: {status_code}");
    trace!("Response headers for {registry}: {response:?}");

    // When the user accidentally uses https://test.pypi.org/simple (no slash) as publish URL, we
    // get a redirect to https://test.pypi.org/simple/ (the canonical index URL), while changing the
    // method to GET (see https://en.wikipedia.org/wiki/Post/Redirect/Get and
    // https://fetch.spec.whatwg.org/#http-redirect-fetch). The user gets a 200 OK while we actually
    // didn't upload anything! Reqwest doesn't support redirect policies conditional on the HTTP
    // method (https://github.com/seanmonstar/reqwest/issues/1777#issuecomment-2303386160), so we're
    // checking after the fact.
    if response.url() != registry {
        return Err(PublishSendError::RedirectError(response.url().clone()));
    }

    if status_code.is_success() {
        if enabled!(Level::TRACE) {
            match response.text().await {
                Ok(response_content) => {
                    trace!("Response content for {registry}: {response_content}");
                }
                Err(err) => {
                    trace!("Failed to read response content for {registry}: {err}");
                }
            }
        }
        return Ok(true);
    }

    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|content_type| content_type.to_str().ok())
        .map(ToString::to_string);
    let upload_error = response
        .bytes()
        .await
        .map_err(|err| PublishSendError::StatusNoBody(status_code, err))?;
    let upload_error = String::from_utf8_lossy(&upload_error);

    trace!("Response content for non-200 for {registry}: {upload_error}");

    debug!("Upload error response: {upload_error}");
    // Detect existing file errors the way twine does.
    // https://github.com/pypa/twine/blob/c512bbf166ac38239e58545a39155285f8747a7b/twine/commands/upload.py#L34-L72
    if status_code == 403 {
        if upload_error.contains("overwrite artifact") {
            // Artifactory (https://jfrog.com/artifactory/)
            Ok(false)
        } else {
            Err(PublishSendError::PermissionDenied(
                status_code,
                PublishSendError::extract_error_message(
                    upload_error.to_string(),
                    content_type.as_deref(),
                ),
            ))
        }
    } else if status_code == 409 {
        // conflict, pypiserver (https://pypi.org/project/pypiserver)
        Ok(false)
    } else if status_code == 400
        && (upload_error.contains("updating asset") || upload_error.contains("already been taken"))
    {
        // Nexus Repository OSS (https://www.sonatype.com/nexus-repository-oss)
        // and Gitlab Enterprise Edition (https://about.gitlab.com)
        Ok(false)
    } else {
        Err(PublishSendError::Status(
            status_code,
            PublishSendError::extract_error_message(
                upload_error.to_string(),
                content_type.as_deref(),
            ),
        ))
    }
}

#[cfg(test)]
mod tests {
    use crate::{build_request, form_metadata};
    use distribution_filename::DistFilename;
    use insta::{assert_debug_snapshot, assert_snapshot};
    use itertools::Itertools;
    use std::path::PathBuf;
    use url::Url;
    use uv_client::BaseClientBuilder;

    /// Snapshot the data we send for an upload request for a source distribution.
    #[tokio::test]
    async fn upload_request_source_dist() {
        let filename = "tqdm-999.0.0.tar.gz";
        let file = PathBuf::from("../../scripts/links/").join(filename);
        let filename = DistFilename::try_from_normalized_filename(filename).unwrap();

        let form_metadata = form_metadata(&file, &filename).await.unwrap();

        let formatted_metadata = form_metadata
            .iter()
            .map(|(k, v)| format!("{k}: {v}"))
            .join("\n");
        assert_snapshot!(&formatted_metadata, @r###"
        :action: file_upload
        sha256_digest: 89fa05cffa7f457658373b85de302d24d0c205ceda2819a8739e324b75e9430b
        protocol_version: 1
        metadata_version: 2.3
        name: tqdm
        version: 999.0.0
        filetype: sdist
        pyversion: source
        description: # tqdm

        [](https://pypi.org/project/tqdm)
        [](https://pypi.org/project/tqdm)

        -----

        **Table of Contents**

        - [Installation](#installation)
        - [License](#license)

        ## Installation

        ```console
        pip install tqdm
        ```

        ## License

        `tqdm` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license.

        description_content_type: text/markdown
        author_email: Charlie Marsh <charlie.r.marsh@gmail.com>
        requires_python: >=3.8
        classifiers: Development Status :: 4 - Beta
        classifiers: Programming Language :: Python
        classifiers: Programming Language :: Python :: 3.8
        classifiers: Programming Language :: Python :: 3.9
        classifiers: Programming Language :: Python :: 3.10
        classifiers: Programming Language :: Python :: 3.11
        classifiers: Programming Language :: Python :: 3.12
        classifiers: Programming Language :: Python :: Implementation :: CPython
        classifiers: Programming Language :: Python :: Implementation :: PyPy
        project_urls: Documentation, https://github.com/unknown/tqdm#readme
        project_urls: Issues, https://github.com/unknown/tqdm/issues
        project_urls: Source, https://github.com/unknown/tqdm
        "###);

        let request = build_request(
            &file,
            &filename,
            &Url::parse("https://example.org/upload").unwrap(),
            &BaseClientBuilder::new().build(),
            Some("ferris"),
            Some("F3RR!S"),
            form_metadata,
        )
        .await
        .unwrap();

        insta::with_settings!({
            filters => [("boundary=[0-9a-f-]+", "boundary=[...]")],
        }, {
            assert_debug_snapshot!(&request, @r###"
            RequestBuilder {
                inner: RequestBuilder {
                    method: POST,
                    url: Url {
                        scheme: "https",
                        cannot_be_a_base: false,
                        username: "",
                        password: None,
                        host: Some(
                            Domain(
                                "example.org",
                            ),
                        ),
                        port: None,
                        path: "/upload",
                        query: None,
                        fragment: None,
                    },
                    headers: {
                        "content-type": "multipart/form-data; boundary=[...]",
                        "accept": "application/json;q=0.9, text/plain;q=0.8, text/html;q=0.7",
                        "authorization": "Basic ZmVycmlzOkYzUlIhUw==",
                    },
                },
                ..
            }
            "###);
        });
    }

    /// Snapshot the data we send for an upload request for a wheel.
    #[tokio::test]
    async fn upload_request_wheel() {
        let filename = "tqdm-4.66.1-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl";
        let file = PathBuf::from("../../scripts/links/").join(filename);
        let filename = DistFilename::try_from_normalized_filename(filename).unwrap();

        let form_metadata = form_metadata(&file, &filename).await.unwrap();

        let formatted_metadata = form_metadata
            .iter()
            .map(|(k, v)| format!("{k}: {v}"))
            .join("\n");
        assert_snapshot!(&formatted_metadata, @r###"
        :action: file_upload
        sha256_digest: 0d88ca657bc6b64995ca416e0c59c71af85cc10015d940fa446c42a8b485ee1c
        protocol_version: 1
        metadata_version: 2.1
        name: tqdm
        version: 4.66.1
        filetype: bdist_wheel
        pyversion: py3
        summary: Fast, Extensible Progress Meter
        description_content_type: text/x-rst
        maintainer_email: tqdm developers <devs@tqdm.ml>
        license: MPL-2.0 AND MIT
        keywords: progressbar,progressmeter,progress,bar,meter,rate,eta,console,terminal,time
        requires_python: >=3.7
        classifiers: Development Status :: 5 - Production/Stable
        classifiers: Environment :: Console
        classifiers: Environment :: MacOS X
        classifiers: Environment :: Other Environment
        classifiers: Environment :: Win32 (MS Windows)
        classifiers: Environment :: X11 Applications
        classifiers: Framework :: IPython
        classifiers: Framework :: Jupyter
        classifiers: Intended Audience :: Developers
        classifiers: Intended Audience :: Education
        classifiers: Intended Audience :: End Users/Desktop
        classifiers: Intended Audience :: Other Audience
        classifiers: Intended Audience :: System Administrators
        classifiers: License :: OSI Approved :: MIT License
        classifiers: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
        classifiers: Operating System :: MacOS
        classifiers: Operating System :: MacOS :: MacOS X
        classifiers: Operating System :: Microsoft
        classifiers: Operating System :: Microsoft :: MS-DOS
        classifiers: Operating System :: Microsoft :: Windows
        classifiers: Operating System :: POSIX
        classifiers: Operating System :: POSIX :: BSD
        classifiers: Operating System :: POSIX :: BSD :: FreeBSD
        classifiers: Operating System :: POSIX :: Linux
        classifiers: Operating System :: POSIX :: SunOS/Solaris
        classifiers: Operating System :: Unix
        classifiers: Programming Language :: Python
        classifiers: Programming Language :: Python :: 3
        classifiers: Programming Language :: Python :: 3.7
        classifiers: Programming Language :: Python :: 3.8
        classifiers: Programming Language :: Python :: 3.9
        classifiers: Programming Language :: Python :: 3.10
        classifiers: Programming Language :: Python :: 3.11
        classifiers: Programming Language :: Python :: 3 :: Only
        classifiers: Programming Language :: Python :: Implementation
        classifiers: Programming Language :: Python :: Implementation :: IronPython
        classifiers: Programming Language :: Python :: Implementation :: PyPy
        classifiers: Programming Language :: Unix Shell
        classifiers: Topic :: Desktop Environment
        classifiers: Topic :: Education :: Computer Aided Instruction (CAI)
        classifiers: Topic :: Education :: Testing
        classifiers: Topic :: Office/Business
        classifiers: Topic :: Other/Nonlisted Topic
        classifiers: Topic :: Software Development :: Build Tools
        classifiers: Topic :: Software Development :: Libraries
        classifiers: Topic :: Software Development :: Libraries :: Python Modules
        classifiers: Topic :: Software Development :: Pre-processors
        classifiers: Topic :: Software Development :: User Interfaces
        classifiers: Topic :: System :: Installation/Setup
        classifiers: Topic :: System :: Logging
        classifiers: Topic :: System :: Monitoring
        classifiers: Topic :: System :: Shells
        classifiers: Topic :: Terminals
        classifiers: Topic :: Utilities
        requires_dist: colorama ; platform_system == "Windows"
        requires_dist: pytest >=6 ; extra == 'dev'
        requires_dist: pytest-cov ; extra == 'dev'
        requires_dist: pytest-timeout ; extra == 'dev'
        requires_dist: pytest-xdist ; extra == 'dev'
        requires_dist: ipywidgets >=6 ; extra == 'notebook'
        requires_dist: slack-sdk ; extra == 'slack'
        requires_dist: requests ; extra == 'telegram'
        project_urls: homepage, https://tqdm.github.io
        project_urls: repository, https://github.com/tqdm/tqdm
        project_urls: changelog, https://tqdm.github.io/releases
        project_urls: wiki, https://github.com/tqdm/tqdm/wiki
        "###);

        let request = build_request(
            &file,
            &filename,
            &Url::parse("https://example.org/upload").unwrap(),
            &BaseClientBuilder::new().build(),
            Some("ferris"),
            Some("F3RR!S"),
            form_metadata,
        )
        .await
        .unwrap();

        insta::with_settings!({
            filters => [("boundary=[0-9a-f-]+", "boundary=[...]")],
        }, {
            assert_debug_snapshot!(&request, @r###"
            RequestBuilder {
                inner: RequestBuilder {
                    method: POST,
                    url: Url {
                        scheme: "https",
                        cannot_be_a_base: false,
                        username: "",
                        password: None,
                        host: Some(
                            Domain(
                                "example.org",
                            ),
                        ),
                        port: None,
                        path: "/upload",
                        query: None,
                        fragment: None,
                    },
                    headers: {
                        "content-type": "multipart/form-data; boundary=[...]",
                        "accept": "application/json;q=0.9, text/plain;q=0.8, text/html;q=0.7",
                        "authorization": "Basic ZmVycmlzOkYzUlIhUw==",
                    },
                },
                ..
            }
            "###);
        });
    }
}
@@ -36,6 +36,7 @@ textwrap = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }

[package.metadata.cargo-shear]
ignored = ["uv-options-metadata", "clap"]
@@ -1,5 +1,6 @@
use std::num::NonZeroUsize;
use std::path::PathBuf;
use url::Url;

use distribution_types::IndexUrl;
use install_wheel_rs::linker::LinkMode;

@@ -71,6 +72,7 @@ impl_combine_or!(AnnotationStyle);
impl_combine_or!(ExcludeNewer);
impl_combine_or!(IndexStrategy);
impl_combine_or!(IndexUrl);
impl_combine_or!(Url);
impl_combine_or!(KeyringProviderType);
impl_combine_or!(LinkMode);
impl_combine_or!(NonZeroUsize);
@@ -1,11 +1,11 @@
use std::{fmt::Debug, num::NonZeroUsize, path::PathBuf};

-use serde::{Deserialize, Serialize};
-
use distribution_types::{FlatIndexLocation, IndexUrl, StaticMetadata};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::Requirement;
use pypi_types::{SupportedEnvironments, VerbatimParsedUrl};
+use serde::{Deserialize, Serialize};
+use url::Url;
use uv_cache_info::CacheKey;
use uv_configuration::{
    ConfigSettings, IndexStrategy, KeyringProviderType, PackageNameSpecifier, TargetTriple,

@@ -42,6 +42,9 @@ pub struct Options {
    #[serde(flatten)]
    pub top_level: ResolverInstallerOptions,

    #[serde(flatten)]
    pub publish: PublishOptions,

    #[option_group]
    pub pip: Option<PipOptions>,

@@ -1497,6 +1500,7 @@ pub struct OptionsWire {
    no_build_package: Option<Vec<PackageName>>,
    no_binary: Option<bool>,
    no_binary_package: Option<Vec<PackageName>>,
    publish_url: Option<Url>,

    pip: Option<PipOptions>,
    cache_keys: Option<Vec<CacheKey>>,

@@ -1564,6 +1568,7 @@ impl From<OptionsWire> for Options {
            override_dependencies,
            constraint_dependencies,
            environments,
            publish_url,
            workspace: _,
            sources: _,
            dev_dependencies: _,

@@ -1611,6 +1616,7 @@ impl From<OptionsWire> for Options {
                no_binary,
                no_binary_package,
            },
            publish: PublishOptions { publish_url },
            pip,
            cache_keys,
            override_dependencies,

@@ -1619,3 +1625,21 @@ impl From<OptionsWire> for Options {
        }
    }
}

#[derive(
    Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize, CombineOptions, OptionsMetadata,
)]
#[serde(rename_all = "kebab-case")]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct PublishOptions {
    /// The URL for publishing packages to the Python package index (by default:
    /// <https://upload.pypi.org/legacy/>).
    #[option(
        default = "\"https://upload.pypi.org/legacy/\"",
        value_type = "str",
        example = r#"
            publish-url = "https://test.pypi.org/legacy/"
        "#
    )]
    pub publish_url: Option<Url>,
}
@ -35,6 +35,7 @@ uv-fs = { workspace = true }
|
|||
uv-git = { workspace = true }
|
||||
uv-installer = { workspace = true }
|
||||
uv-normalize = { workspace = true }
|
||||
uv-publish = { workspace = true }
|
||||
uv-python = { workspace = true, features = ["schemars"]}
|
||||
uv-requirements = { workspace = true }
|
||||
uv-resolver = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ pub(crate) use project::remove::remove;
|
|||
pub(crate) use project::run::{run, RunCommand};
|
||||
pub(crate) use project::sync::sync;
|
||||
pub(crate) use project::tree::tree;
|
||||
pub(crate) use publish::publish;
|
||||
pub(crate) use python::dir::dir as python_dir;
|
||||
pub(crate) use python::find::find as python_find;
|
||||
pub(crate) use python::install::install as python_install;
|
||||
|
|
@ -70,6 +71,7 @@ pub(crate) mod reporters;
|
|||
mod tool;
|
||||
|
||||
mod build;
|
||||
mod publish;
|
||||
#[cfg(feature = "self-update")]
|
||||
mod self_update;
|
||||
mod venv;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,74 @@
|
|||
use crate::commands::{human_readable_bytes, ExitStatus};
|
||||
use crate::printer::Printer;
|
||||
use anyhow::{bail, Result};
|
||||
use owo_colors::OwoColorize;
|
||||
use std::fmt::Write;
|
||||
use tracing::info;
|
||||
use url::Url;
|
||||
use uv_client::{BaseClientBuilder, Connectivity};
|
||||
use uv_configuration::{KeyringProviderType, TrustedHost};
|
||||
use uv_publish::{files_for_publishing, upload};
|
||||
|
||||
pub(crate) async fn publish(
|
||||
paths: Vec<String>,
|
||||
publish_url: Url,
|
||||
keyring_provider: KeyringProviderType,
|
||||
allow_insecure_host: Vec<TrustedHost>,
|
||||
username: Option<String>,
|
||||
password: Option<String>,
|
||||
connectivity: Connectivity,
|
||||
native_tls: bool,
|
||||
printer: Printer,
|
||||
) -> Result<ExitStatus> {
|
||||
if connectivity.is_offline() {
|
||||
bail!("Unable to publish files in offline mode");
|
||||
}
|
||||
|
||||
let files = files_for_publishing(paths)?;
|
||||
match files.len() {
|
||||
0 => bail!("No files found to publish"),
|
||||
1 => writeln!(printer.stderr(), "Publishing 1 file to {publish_url}")?,
|
||||
n => writeln!(printer.stderr(), "Publishing {n} files to {publish_url}")?,
|
||||
}
|
||||
|
||||
let client = BaseClientBuilder::new()
|
||||
// Don't try cloning the request for retries.
|
||||
// https://github.com/seanmonstar/reqwest/issues/2416
|
||||
.retries(0)
|
||||
.keyring(keyring_provider)
|
||||
.native_tls(native_tls)
|
||||
.allow_insecure_host(allow_insecure_host)
|
||||
// Don't try cloning the request to make an unauthenticated request first.
|
||||
.only_authenticated(true)
|
||||
.build();
|
||||
|
||||
for (file, filename) in files {
|
||||
let size = fs_err::metadata(&file)?.len();
|
||||
let (bytes, unit) = human_readable_bytes(size);
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
"{} {filename} {}",
|
||||
"Uploading".bold().green(),
|
||||
format!("({bytes:.1}{unit})").dimmed()
|
||||
)?;
|
||||
let uploaded = upload(
|
||||
&file,
|
||||
&filename,
|
||||
&publish_url,
|
||||
&client,
|
||||
username.as_deref(),
|
||||
password.as_deref(),
|
||||
)
|
||||
.await?; // Filename and/or URL are already attached, if applicable.
|
||||
info!("Upload succeeded");
|
||||
if !uploaded {
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
"{}",
|
||||
"File already exists, skipping".dimmed()
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
|
@ -33,6 +33,7 @@ use crate::printer::Printer;
|
|||
use crate::settings::{
|
||||
CacheSettings, GlobalSettings, PipCheckSettings, PipCompileSettings, PipFreezeSettings,
|
||||
PipInstallSettings, PipListSettings, PipShowSettings, PipSyncSettings, PipUninstallSettings,
|
||||
PublishSettings,
|
||||
};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
|
|
@ -1085,6 +1086,36 @@ async fn run(cli: Cli) -> Result<ExitStatus> {
|
|||
commands::python_dir()?;
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
Commands::Publish(args) => {
|
||||
show_settings!(args);
|
||||
|
||||
if globals.preview.is_disabled() {
|
||||
warn_user_once!("`uv publish` is experimental and may change without warning");
|
||||
}
|
||||
|
||||
// Resolve the settings from the command-line arguments and workspace configuration.
|
||||
let PublishSettings {
|
||||
files,
|
||||
username,
|
||||
password,
|
||||
publish_url,
|
||||
keyring_provider,
|
||||
allow_insecure_host,
|
||||
} = PublishSettings::resolve(args, filesystem);
|
||||
|
||||
commands::publish(
|
||||
files,
|
||||
publish_url,
|
||||
keyring_provider,
|
||||
allow_insecure_host,
|
||||
username,
|
||||
password,
|
||||
globals.connectivity,
|
||||
globals.native_tls,
|
||||
printer,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -8,10 +8,11 @@ use distribution_types::{DependencyMetadata, IndexLocations};
|
|||
use install_wheel_rs::linker::LinkMode;
|
||||
use pep508_rs::{ExtraName, RequirementOrigin};
|
||||
use pypi_types::{Requirement, SupportedEnvironments};
|
||||
use url::Url;
|
||||
use uv_cache::{CacheArgs, Refresh};
|
||||
use uv_cli::{
|
||||
options::{flag, resolver_installer_options, resolver_options},
|
||||
BuildArgs, ExportArgs, ToolUpgradeArgs,
|
||||
BuildArgs, ExportArgs, PublishArgs, ToolUpgradeArgs,
|
||||
};
|
||||
use uv_cli::{
|
||||
AddArgs, ColorChoice, ExternalCommand, GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe,
|
||||
|
|
@ -30,7 +31,8 @@ use uv_normalize::PackageName;
|
|||
use uv_python::{Prefix, PythonDownloads, PythonPreference, PythonVersion, Target};
|
||||
use uv_resolver::{AnnotationStyle, DependencyMode, ExcludeNewer, PrereleaseMode, ResolutionMode};
|
||||
use uv_settings::{
|
||||
Combine, FilesystemOptions, Options, PipOptions, ResolverInstallerOptions, ResolverOptions,
|
||||
Combine, FilesystemOptions, Options, PipOptions, PublishOptions, ResolverInstallerOptions,
|
||||
ResolverOptions,
|
||||
};
|
||||
use uv_warnings::warn_user_once;
|
||||
use uv_workspace::pyproject::DependencyType;
|
||||
|
|
@ -38,6 +40,9 @@ use uv_workspace::pyproject::DependencyType;
|
|||
use crate::commands::ToolRunCommand;
|
||||
use crate::commands::{pip::operations::Modifications, InitProjectKind};
|
||||
|
||||
/// The default publish URL.
|
||||
const PYPI_PUBLISH_URL: &str = "https://upload.pypi.org/legacy/";
|
||||
|
||||
/// The resolved global settings to use for any invocation of the CLI.
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
@ -2420,6 +2425,70 @@ impl<'a> From<ResolverInstallerSettingsRef<'a>> for InstallerSettingsRef<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
/// The resolved settings to use for an invocation of the `uv publish` CLI.
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct PublishSettings {
|
||||
// CLI only, see [`PublishArgs`] for docs.
|
||||
pub(crate) files: Vec<String>,
|
||||
pub(crate) username: Option<String>,
|
||||
pub(crate) password: Option<String>,
|
||||
|
||||
// Both CLI and configuration.
|
||||
pub(crate) publish_url: Url,
|
||||
pub(crate) keyring_provider: KeyringProviderType,
|
||||
pub(crate) allow_insecure_host: Vec<TrustedHost>,
|
||||
}
|
||||
|
||||
impl PublishSettings {
|
||||
/// Resolve the [`crate::settings::PublishSettings`] from the CLI and filesystem configuration.
|
||||
pub(crate) fn resolve(args: PublishArgs, filesystem: Option<FilesystemOptions>) -> Self {
|
||||
let Options {
|
||||
publish, top_level, ..
|
||||
} = filesystem
|
||||
.map(FilesystemOptions::into_options)
|
||||
.unwrap_or_default();
|
||||
|
||||
let PublishOptions { publish_url } = publish;
|
||||
let ResolverInstallerOptions {
|
||||
keyring_provider,
|
||||
allow_insecure_host,
|
||||
..
|
||||
} = top_level;
|
||||
|
||||
// Tokens are encoded in the same way as username/password
|
||||
let (username, password) = if let Some(token) = args.token {
|
||||
(Some("__token__".to_string()), Some(token))
|
||||
} else {
|
||||
(args.username, args.password)
|
||||
};
|
||||
|
||||
Self {
|
||||
files: args.files,
|
||||
username,
|
||||
password,
|
||||
publish_url: args
|
||||
.publish_url
|
||||
.combine(publish_url)
|
||||
.unwrap_or_else(|| Url::parse(PYPI_PUBLISH_URL).unwrap()),
|
||||
keyring_provider: args
|
||||
.keyring_provider
|
||||
.combine(keyring_provider)
|
||||
.unwrap_or_default(),
|
||||
allow_insecure_host: args
|
||||
.allow_insecure_host
|
||||
.map(|allow_insecure_host| {
|
||||
allow_insecure_host
|
||||
.into_iter()
|
||||
.filter_map(Maybe::into_option)
|
||||
.collect()
|
||||
})
|
||||
.combine(allow_insecure_host)
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Environment variables that are not exposed as CLI arguments.
|
||||
mod env {
|
||||
pub(super) const CONCURRENT_DOWNLOADS: (&str, &str) =
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@ pub const INSTA_FILTERS: &[(&str, &str)] = &[
|
|||
(r"tv_sec: \d+", "tv_sec: [TIME]"),
|
||||
(r"tv_nsec: \d+", "tv_nsec: [TIME]"),
|
||||
// Rewrite Windows output to Unix output
|
||||
(r"\\([\w\d])", "/$1"),
|
||||
(r"\\([\w\d]|\.\.)", "/$1"),
|
||||
(r"uv.exe", "uv"),
|
||||
// uv version display
|
||||
(
|
||||
|
|
@ -579,6 +579,21 @@ impl TestContext {
|
|||
command
|
||||
}
|
||||
|
||||
/// Create a `uv publish` command with options shared across scenarios.
|
||||
#[expect(clippy::unused_self)] // For consistency
|
||||
pub fn publish(&self) -> Command {
|
||||
let mut command = Command::new(get_bin());
|
||||
command.arg("publish");
|
||||
|
||||
if cfg!(all(windows, debug_assertions)) {
|
||||
// TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
|
||||
// default windows stack of 1MB
|
||||
command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string());
|
||||
}
|
||||
|
||||
command
|
||||
}
|
||||
|
||||
/// Create a `uv python find` command with options shared across scenarios.
|
||||
pub fn python_find(&self) -> Command {
|
||||
let mut command = Command::new(get_bin());
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ fn help() {
|
|||
pip Manage Python packages with a pip-compatible interface
|
||||
venv Create a virtual environment
|
||||
build Build Python packages into source distributions and wheels
|
||||
publish Upload distributions to an index
|
||||
cache Manage uv's cache
|
||||
version Display uv's version
|
||||
generate-shell-completion Generate shell completion
|
||||
|
|
@ -95,6 +96,7 @@ fn help_flag() {
|
|||
pip Manage Python packages with a pip-compatible interface
|
||||
venv Create a virtual environment
|
||||
build Build Python packages into source distributions and wheels
|
||||
publish Upload distributions to an index
|
||||
cache Manage uv's cache
|
||||
version Display uv's version
|
||||
help Display documentation for a command
|
||||
|
|
@ -159,6 +161,7 @@ fn help_short_flag() {
|
|||
pip Manage Python packages with a pip-compatible interface
|
||||
venv Create a virtual environment
|
||||
build Build Python packages into source distributions and wheels
|
||||
publish Upload distributions to an index
|
||||
cache Manage uv's cache
|
||||
version Display uv's version
|
||||
help Display documentation for a command
|
||||
|
|
@ -666,6 +669,7 @@ fn help_unknown_subcommand() {
|
|||
pip
|
||||
venv
|
||||
build
|
||||
publish
|
||||
cache
|
||||
version
|
||||
generate-shell-completion
|
||||
|
|
@ -691,6 +695,7 @@ fn help_unknown_subcommand() {
|
|||
pip
|
||||
venv
|
||||
build
|
||||
publish
|
||||
cache
|
||||
version
|
||||
generate-shell-completion
|
||||
|
|
@ -743,6 +748,7 @@ fn help_with_global_option() {
|
|||
pip Manage Python packages with a pip-compatible interface
|
||||
venv Create a virtual environment
|
||||
build Build Python packages into source distributions and wheels
|
||||
publish Upload distributions to an index
|
||||
cache Manage uv's cache
|
||||
version Display uv's version
|
||||
generate-shell-completion Generate shell completion
|
||||
|
|
@ -845,6 +851,7 @@ fn help_with_no_pager() {
|
|||
pip Manage Python packages with a pip-compatible interface
|
||||
venv Create a virtual environment
|
||||
build Build Python packages into source distributions and wheels
|
||||
publish Upload distributions to an index
|
||||
cache Manage uv's cache
|
||||
version Display uv's version
|
||||
generate-shell-completion Generate shell completion
|
||||
|
|
|
|||
|
|
@ -194,7 +194,7 @@ fn invalid_pyproject_toml_option_unknown_field() -> Result<()> {
|
|||
|
|
||||
2 | unknown = "field"
|
||||
| ^^^^^^^
|
||||
unknown field `unknown`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `dev-dependencies`, `managed`, `package`
|
||||
unknown field `unknown`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `dev-dependencies`, `managed`, `package`
|
||||
|
||||
Resolved in [TIME]
|
||||
Audited in [TIME]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,57 @@
|
|||
#![cfg(feature = "pypi")]
|
||||
|
||||
use common::{uv_snapshot, TestContext};
|
||||
|
||||
mod common;
|
||||
|
||||
#[test]
|
||||
fn username_password_no_longer_supported() {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
uv_snapshot!(context.filters(), context.publish()
|
||||
.arg("-u")
|
||||
.arg("dummy")
|
||||
.arg("-p")
|
||||
.arg("dummy")
|
||||
.arg("--publish-url")
|
||||
.arg("https://test.pypi.org/legacy/")
|
||||
.arg("../../scripts/links/ok-1.0.0-py3-none-any.whl"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: `uv publish` is experimental and may change without warning
|
||||
Publishing 1 file to https://test.pypi.org/legacy/
|
||||
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
|
||||
error: Failed to publish `../../scripts/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/
|
||||
Caused by: Permission denied (status code 403 Forbidden): 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_token() {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
uv_snapshot!(context.filters(), context.publish()
|
||||
.arg("-u")
|
||||
.arg("__token__")
|
||||
.arg("-p")
|
||||
.arg("dummy")
|
||||
.arg("--publish-url")
|
||||
.arg("https://test.pypi.org/legacy/")
|
||||
.arg("../../scripts/links/ok-1.0.0-py3-none-any.whl"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: `uv publish` is experimental and may change without warning
|
||||
Publishing 1 file to https://test.pypi.org/legacy/
|
||||
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
|
||||
error: Failed to publish `../../scripts/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/
|
||||
Caused by: Permission denied (status code 403 Forbidden): 403 Invalid or non-existent authentication information. See https://test.pypi.org/help/#invalid-auth for more information.
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
|
@ -3150,7 +3150,7 @@ fn resolve_config_file() -> anyhow::Result<()> {
|
|||
|
|
||||
1 | [project]
|
||||
| ^^^^^^^
|
||||
unknown field `project`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `dev-dependencies`, `managed`, `package`
|
||||
unknown field `project`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `dev-dependencies`, `managed`, `package`
|
||||
|
||||
"###
|
||||
);
|
||||
|
|
|
|||
|
|
@ -63,6 +63,14 @@ uv accepts the following command-line arguments as environment variables:
|
|||
`--no-python-downloads` option. Whether uv should allow Python downloads.
|
||||
- `UV_COMPILE_BYTECODE`: Equivalent to the `--compile-bytecode` command-line argument. If set, uv
|
||||
will compile Python source files to bytecode after installation.
|
||||
- `UV_PUBLISH_URL`: Equivalent to the `--publish-url` command-line argument. The URL of the upload
|
||||
endpoint of the index to use with `uv publish`.
|
||||
- `UV_PUBLISH_TOKEN`: Equivalent to the `--token` command-line argument in `uv publish`. If set, uv
|
||||
will use this token (with the username `__token__`) for publishing.
|
||||
- `UV_PUBLISH_USERNAME`: Equivalent to the `--username` command-line argument in `uv publish`. If
|
||||
set, uv will use this username for publishing.
|
||||
- `UV_PUBLISH_PASSWORD`: Equivalent to the `--password` command-line argument in `uv publish`. If
|
||||
set, uv will use this password for publishing.
|
||||
|
||||
In each case, the corresponding command-line argument takes precedence over an environment variable.
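For example, a fully environment-driven upload might look like the following sketch (the index URL and token value are placeholders):

```console
$ export UV_PUBLISH_URL="https://test.pypi.org/legacy/"
$ export UV_PUBLISH_TOKEN="pypi-<your-token>"
$ uv publish dist/*
```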
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,7 @@
|
|||
# Publishing a package
|
||||
|
||||
uv supports building Python packages into source and binary distributions via `uv build`.
|
||||
|
||||
As uv does not yet have a dedicated command for publishing packages, you can use the PyPA tool
|
||||
[`twine`](https://github.com/pypa/twine) to upload your package to a package registry, which can be
|
||||
invoked via `uvx`.
|
||||
uv supports building Python packages into source and binary distributions via `uv build` and
|
||||
uploading them to a registry with `uv publish`.
|
||||
|
||||
## Preparing your project for packaging
|
||||
|
||||
|
|
@ -32,15 +29,20 @@ Alternatively, `uv build <SRC>` will build the package in the specified director
|
|||
|
||||
## Publishing your package
|
||||
|
||||
Publish your package with `twine`:
|
||||
Publish your package with `uv publish`:
|
||||
|
||||
```console
|
||||
$ uvx twine upload dist/*
|
||||
$ uv publish
|
||||
```
|
||||
|
||||
!!! tip
|
||||
Set a PyPI token with `--token` or `UV_PUBLISH_TOKEN`, or set a username with `--username` or
|
||||
`UV_PUBLISH_USERNAME` and password with `--password` or `UV_PUBLISH_PASSWORD`.
|
||||
|
||||
To provide credentials, use the `TWINE_USERNAME` and `TWINE_PASSWORD` environment variables.
|
||||
!!! note
|
||||
|
||||
PyPI no longer supports publishing with a username and password; instead, you need to
|
||||
generate a token. Using a token is equivalent to setting `--username __token__` and using the
|
||||
token as the password.
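For example, a token-based upload to TestPyPI might look like this sketch (the URL and token are placeholders):

```console
$ uv publish --publish-url https://test.pypi.org/legacy/ --token pypi-<your-token>
```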
|
||||
|
||||
## Installing your package
|
||||
|
||||
|
|
|
|||
|
|
@ -38,6 +38,8 @@ uv [OPTIONS] <COMMAND>
|
|||
</dd>
|
||||
<dt><a href="#uv-build"><code>uv build</code></a></dt><dd><p>Build Python packages into source distributions and wheels</p>
|
||||
</dd>
|
||||
<dt><a href="#uv-publish"><code>uv publish</code></a></dt><dd><p>Upload distributions to an index</p>
|
||||
</dd>
|
||||
<dt><a href="#uv-cache"><code>uv cache</code></a></dt><dd><p>Manage uv’s cache</p>
|
||||
</dd>
|
||||
<dt><a href="#uv-version"><code>uv version</code></a></dt><dd><p>Display uv’s version</p>
|
||||
|
|
@ -6776,6 +6778,152 @@ uv build [OPTIONS] [SRC]
|
|||
|
||||
</dd></dl>
|
||||
|
||||
## uv publish
|
||||
|
||||
Upload distributions to an index
|
||||
|
||||
<h3 class="cli-reference">Usage</h3>
|
||||
|
||||
```
|
||||
uv publish [OPTIONS] [FILES]...
|
||||
```
|
||||
|
||||
<h3 class="cli-reference">Arguments</h3>
|
||||
|
||||
<dl class="cli-reference"><dt><code>FILES</code></dt><dd><p>Paths to the files to upload. Accepts glob expressions.</p>
|
||||
|
||||
<p>Defaults to the <code>dist</code> directory. Selects only wheels and source distributions, while ignoring other files.</p>
|
||||
|
||||
</dd></dl>
|
||||
|
||||
<h3 class="cli-reference">Options</h3>
|
||||
|
||||
<dl class="cli-reference"><dt><code>--allow-insecure-host</code> <i>allow-insecure-host</i></dt><dd><p>Allow insecure connections to a host.</p>
|
||||
|
||||
<p>Can be provided multiple times.</p>
|
||||
|
||||
<p>Expects to receive either a hostname (e.g., <code>localhost</code>), a host-port pair (e.g., <code>localhost:8080</code>), or a URL (e.g., <code>https://localhost</code>).</p>
|
||||
|
||||
<p>WARNING: Hosts included in this list will not be verified against the system’s certificate store. Only use <code>--allow-insecure-host</code> in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_INSECURE_HOST</code> environment variable.</p>
|
||||
</dd><dt><code>--cache-dir</code> <i>cache-dir</i></dt><dd><p>Path to the cache directory.</p>
|
||||
|
||||
<p>Defaults to <code>$HOME/Library/Caches/uv</code> on macOS, <code>$XDG_CACHE_HOME/uv</code> or <code>$HOME/.cache/uv</code> on Linux, and <code>%LOCALAPPDATA%\uv\cache</code> on Windows.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_CACHE_DIR</code> environment variable.</p>
|
||||
</dd><dt><code>--color</code> <i>color-choice</i></dt><dd><p>Control colors in output</p>
|
||||
|
||||
<p>[default: auto]</p>
|
||||
<p>Possible values:</p>
|
||||
|
||||
<ul>
|
||||
<li><code>auto</code>: Enables colored output only when the output is going to a terminal or TTY with support</li>
|
||||
|
||||
<li><code>always</code>: Enables colored output regardless of the detected environment</li>
|
||||
|
||||
<li><code>never</code>: Disables colored output</li>
|
||||
</ul>
|
||||
</dd><dt><code>--config-file</code> <i>config-file</i></dt><dd><p>The path to a <code>uv.toml</code> file to use for configuration.</p>
|
||||
|
||||
<p>While uv configuration can be included in a <code>pyproject.toml</code> file, it is not allowed in this context.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_CONFIG_FILE</code> environment variable.</p>
|
||||
</dd><dt><code>--help</code>, <code>-h</code></dt><dd><p>Display the concise help for this command</p>
|
||||
|
||||
</dd><dt><code>--keyring-provider</code> <i>keyring-provider</i></dt><dd><p>Attempt to use <code>keyring</code> for authentication for remote requirements files.</p>
|
||||
|
||||
<p>At present, only <code>--keyring-provider subprocess</code> is supported, which configures uv to use the <code>keyring</code> CLI to handle authentication.</p>
|
||||
|
||||
<p>Defaults to <code>disabled</code>.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_KEYRING_PROVIDER</code> environment variable.</p>
|
||||
<p>Possible values:</p>
|
||||
|
||||
<ul>
|
||||
<li><code>disabled</code>: Do not use keyring for credential lookup</li>
|
||||
|
||||
<li><code>subprocess</code>: Use the <code>keyring</code> command for credential lookup</li>
|
||||
</ul>
|
||||
</dd><dt><code>--native-tls</code></dt><dd><p>Whether to load TLS certificates from the platform’s native certificate store.</p>
|
||||
|
||||
<p>By default, uv loads certificates from the bundled <code>webpki-roots</code> crate. The <code>webpki-roots</code> are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).</p>
|
||||
|
||||
<p>However, in some cases, you may want to use the platform’s native certificate store, especially if you’re relying on a corporate trust root (e.g., for a mandatory proxy) that’s included in your system’s certificate store.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_NATIVE_TLS</code> environment variable.</p>
|
||||
</dd><dt><code>--no-cache</code>, <code>-n</code></dt><dd><p>Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation</p>
|
||||
|
||||
<p>May also be set with the <code>UV_NO_CACHE</code> environment variable.</p>
|
||||
</dd><dt><code>--no-config</code></dt><dd><p>Avoid discovering configuration files (<code>pyproject.toml</code>, <code>uv.toml</code>).</p>
|
||||
|
||||
<p>Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_NO_CONFIG</code> environment variable.</p>
|
||||
</dd><dt><code>--no-progress</code></dt><dd><p>Hide all progress outputs.</p>
|
||||
|
||||
<p>For example, spinners or progress bars.</p>
|
||||
|
||||
</dd><dt><code>--no-python-downloads</code></dt><dd><p>Disable automatic downloads of Python.</p>
|
||||
|
||||
</dd><dt><code>--offline</code></dt><dd><p>Disable network access.</p>
|
||||
|
||||
<p>When disabled, uv will only use locally cached data and locally available files.</p>
|
||||
|
||||
</dd><dt><code>--password</code>, <code>-p</code> <i>password</i></dt><dd><p>The password for the upload</p>
|
||||
|
||||
<p>May also be set with the <code>UV_PUBLISH_PASSWORD</code> environment variable.</p>
|
||||
</dd><dt><code>--project</code> <i>project</i></dt><dd><p>Run the command within the given project directory.</p>
|
||||
|
||||
<p>All <code>pyproject.toml</code>, <code>uv.toml</code>, and <code>.python-version</code> files will be discovered by walking up the directory tree from the project root, as will the project’s virtual environment (<code>.venv</code>).</p>
|
||||
|
||||
<p>Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.</p>
|
||||
|
||||
<p>This setting has no effect when used in the <code>uv pip</code> interface.</p>
|
||||
|
||||
</dd><dt><code>--publish-url</code> <i>publish-url</i></dt><dd><p>The URL of the upload endpoint.</p>
|
||||
|
||||
<p>Note that this typically differs from the index URL.</p>
|
||||
|
||||
<p>Defaults to PyPI’s publish URL (<https://upload.pypi.org/legacy/>).</p>
|
||||
|
||||
|
||||
|
||||
<p>May also be set with the <code>UV_PUBLISH_URL</code> environment variable.</p>
|
||||
</dd><dt><code>--python-preference</code> <i>python-preference</i></dt><dd><p>Whether to prefer uv-managed or system Python installations.</p>
|
||||
|
||||
<p>By default, uv prefers using Python versions it manages. However, it will use system Python installations if a uv-managed Python is not installed. This option allows prioritizing or ignoring system Python installations.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_PYTHON_PREFERENCE</code> environment variable.</p>
|
||||
<p>Possible values:</p>
|
||||
|
||||
<ul>
|
||||
<li><code>only-managed</code>: Only use managed Python installations; never use system Python installations</li>
|
||||
|
||||
<li><code>managed</code>: Prefer managed Python installations over system Python installations</li>
|
||||
|
||||
<li><code>system</code>: Prefer system Python installations over managed Python installations</li>
|
||||
|
||||
<li><code>only-system</code>: Only use system Python installations; never use managed Python installations</li>
|
||||
</ul>
|
||||
</dd><dt><code>--quiet</code>, <code>-q</code></dt><dd><p>Do not print any output</p>
|
||||
|
||||
</dd><dt><code>--token</code>, <code>-t</code> <i>token</i></dt><dd><p>The token for the upload.</p>
|
||||
|
||||
<p>Using a token is equivalent to passing <code>__token__</code> as <code>--username</code> and the token as <code>--password</code>.</p>
|
||||
|
||||
<p>May also be set with the <code>UV_PUBLISH_TOKEN</code> environment variable.</p>
|
||||
</dd><dt><code>--username</code>, <code>-u</code> <i>username</i></dt><dd><p>The username for the upload</p>
|
||||
|
||||
<p>May also be set with the <code>UV_PUBLISH_USERNAME</code> environment variable.</p>
|
||||
</dd><dt><code>--verbose</code>, <code>-v</code></dt><dd><p>Use verbose output.</p>
|
||||
|
||||
<p>You can configure fine-grained logging using the <code>RUST_LOG</code> environment variable. (<https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives>)</p>
|
||||
|
||||
</dd><dt><code>--version</code>, <code>-V</code></dt><dd><p>Display the uv version</p>
|
||||
|
||||
</dd></dl>
|
||||
|
||||
## uv cache
|
||||
|
||||
Manage uv's cache
|
||||
|
|
|
|||
|
|
@ -1077,6 +1077,32 @@ Whether to enable experimental, preview features.
|
|||
|
||||
---
|
||||
|
||||
### [`publish-url`](#publish-url) {: #publish-url }
|
||||
|
||||
The URL for publishing packages to the Python package index (by default:
|
||||
<https://upload.pypi.org/legacy/>).
|
||||
|
||||
**Default value**: `"https://upload.pypi.org/legacy/"`
|
||||
|
||||
**Type**: `str`
|
||||
|
||||
**Example usage**:
|
||||
|
||||
=== "pyproject.toml"
|
||||
|
||||
```toml
|
||||
[tool.uv]
|
||||
publish-url = "https://test.pypi.org/legacy/"
|
||||
```
|
||||
=== "uv.toml"
|
||||
|
||||
```toml
|
||||
|
||||
publish-url = "https://test.pypi.org/legacy/"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### [`python-downloads`](#python-downloads) {: #python-downloads }
|
||||
|
||||
Whether to allow Python downloads.
|
||||
|
|
|
|||
|
|
@ -0,0 +1 @@
|
|||
astral-test-*
|
||||
|
|
@ -0,0 +1,179 @@
|
|||
# /// script
|
||||
# requires-python = ">=3.12"
|
||||
# dependencies = [
|
||||
# "httpx>=0.27,<0.28",
|
||||
# "packaging>=24.1,<25",
|
||||
# ]
|
||||
# ///
|
||||
|
||||
"""
|
||||
Test `uv publish` by uploading a new version of astral-test-<test case> to testpypi,
|
||||
authenticating with one of several supported options.
|
||||
|
||||
# Setup
|
||||
|
||||
**astral-test-token**
|
||||
Set the `UV_TEST_PUBLISH_TOKEN` environment variable.
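For example (the value is a placeholder):

```console
export UV_TEST_PUBLISH_TOKEN="pypi-<token>"
```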
|
||||
|
||||
**astral-test-password**
|
||||
Set the `UV_TEST_PUBLISH_PASSWORD` environment variable.
|
||||
This project also uses token authentication since it's the only thing that PyPI
|
||||
supports, but it passes the username and password as separate CLI options.
|
||||
TODO(konsti): Add an index for testing that supports username/password.
|
||||
|
||||
**astral-test-keyring**
|
||||
```console
|
||||
uv pip install keyring
|
||||
keyring set https://test.pypi.org/legacy/?astral-test-keyring __token__
|
||||
```
|
||||
The query parameter is a horrible hack stolen from
|
||||
https://github.com/pypa/twine/issues/565#issue-555219267
|
||||
to prevent the other projects from implicitly using the same credentials.
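# Running

A typical invocation against all test projects might look like this (the `--uv` path is
illustrative and assumes a prior debug build):

```console
uv run scripts/publish/test_publish.py --uv target/debug/uv all
```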
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
from argparse import ArgumentParser
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
from subprocess import check_call
|
||||
|
||||
import httpx
|
||||
from packaging.utils import parse_sdist_filename, parse_wheel_filename
|
||||
|
||||
cwd = Path(__file__).parent
|
||||
|
||||
project_urls = {
|
||||
"astral-test-token": "https://test.pypi.org/simple/astral-test-token/",
|
||||
"astral-test-password": "https://test.pypi.org/simple/astral-test-password/",
|
||||
"astral-test-keyring": "https://test.pypi.org/simple/astral-test-keyring/",
|
||||
"astral-test-gitlab-pat": "https://gitlab.com/api/v4/projects/61853105/packages/pypi/simple/astral-test-gitlab-pat",
|
||||
}
|
||||
|
||||
|
||||
def get_new_version(project_name: str) -> str:
|
||||
"""Return the next free path version on pypi"""
|
||||
data = httpx.get(project_urls[project_name]).text
|
||||
versions = set()
|
||||
for filename in list(m.group(1) for m in re.finditer(">([^<]+)</a>", data)):
|
||||
if filename.endswith(".whl"):
|
||||
[_name, version, _build, _tags] = parse_wheel_filename(filename)
|
||||
else:
|
||||
[_name, version] = parse_sdist_filename(filename)
|
||||
versions.add(version)
|
||||
max_version = max(versions)
|
||||
|
||||
# Bump the patch version to obtain an unclaimed version
|
||||
release = list(max_version.release)
|
||||
release[-1] += 1
|
||||
return ".".join(str(i) for i in release)
|
||||
|
||||
|
||||
def create_project(project_name: str, uv: Path):
|
||||
if cwd.joinpath(project_name).exists():
|
||||
rmtree(cwd.joinpath(project_name))
|
||||
check_call([uv, "init", "--lib", project_name], cwd=cwd)
|
||||
pyproject_toml = cwd.joinpath(project_name).joinpath("pyproject.toml")
|
||||
|
||||
# Set to an unclaimed version
|
||||
toml = pyproject_toml.read_text()
|
||||
new_version = get_new_version(project_name)
|
||||
toml = re.sub('version = ".*"', f'version = "{new_version}"', toml)
|
||||
pyproject_toml.write_text(toml)
|
||||
|
||||
|
||||
def publish_project(project_name: str, uv: Path):
|
||||
# Create the project
|
||||
create_project(project_name, uv)
|
||||
|
||||
# Build the project
|
||||
check_call([uv, "build"], cwd=cwd.joinpath(project_name))
|
||||
|
||||
# Upload the project
|
||||
if project_name == "astral-test-token":
|
||||
env = os.environ.copy()
|
||||
env["UV_PUBLISH_TOKEN"] = os.environ["UV_TEST_PUBLISH_TOKEN"]
|
||||
check_call(
|
||||
[
|
||||
uv,
|
||||
"publish",
|
||||
"--publish-url",
|
||||
"https://test.pypi.org/legacy/",
|
||||
],
|
||||
cwd=cwd.joinpath(project_name),
|
||||
env=env,
|
||||
)
|
||||
elif project_name == "astral-test-password":
|
||||
env = os.environ.copy()
|
||||
env["UV_PUBLISH_PASSWORD"] = os.environ["UV_TEST_PUBLISH_PASSWORD"]
|
||||
check_call(
|
||||
[
|
||||
uv,
|
||||
"publish",
|
||||
"--publish-url",
|
||||
"https://test.pypi.org/legacy/",
|
||||
"--username",
|
||||
"__token__",
|
||||
],
|
||||
cwd=cwd.joinpath(project_name),
|
||||
env=env,
|
||||
)
|
||||
elif project_name == "astral-test-keyring":
|
||||
check_call(
|
||||
[
|
||||
uv,
|
||||
"publish",
|
||||
"--publish-url",
|
||||
"https://test.pypi.org/legacy/?astral-test-keyring",
|
||||
"--username",
|
||||
"__token__",
|
||||
"--keyring-provider",
|
||||
"subprocess",
|
||||
],
|
||||
cwd=cwd.joinpath(project_name),
|
||||
)
|
||||
elif project_name == "astral-test-gitlab-pat":
|
||||
env = os.environ.copy()
|
||||
env["UV_PUBLISH_PASSWORD"] = os.environ["UV_TEST_PUBLISH_GITLAB_PAT"]
|
||||
check_call(
|
||||
[
|
||||
uv,
|
||||
"publish",
|
||||
"--publish-url",
|
||||
"https://gitlab.com/api/v4/projects/61853105/packages/pypi",
|
||||
"--username",
|
||||
"astral-test-gitlab-pat",
|
||||
],
|
||||
cwd=cwd.joinpath(project_name),
|
||||
env=env,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown project name: {project_name}")
|
||||
|
||||
|
||||
def main():
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("projects", choices=list(project_urls) + ["all"], nargs="+")
|
||||
parser.add_argument("--uv")
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.uv:
|
||||
# We change the working directory for the subprocess calls, so we have to
|
||||
# absolutize the path.
|
||||
uv = Path.cwd().joinpath(args.uv)
|
||||
else:
|
||||
check_call(["cargo", "build"])
|
||||
executable_suffix = ".exe" if os.name == "nt" else ""
|
||||
uv = cwd.parent.parent.joinpath(f"target/debug/uv{executable_suffix}")
|
||||
|
||||
if args.projects == ["all"]:
|
||||
projects = list(project_urls)
|
||||
else:
|
||||
projects = args.projects
|
||||
|
||||
for project_name in projects:
|
||||
publish_project(project_name, uv)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -329,6 +329,14 @@
|
|||
"null"
|
||||
]
|
||||
},
|
||||
"publish-url": {
|
||||
"description": "The URL for publishing packages to the Python package index (by default: <https://upload.pypi.org/legacy/>).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"format": "uri"
|
||||
},
|
||||
"python-downloads": {
|
||||
"description": "Whether to allow Python downloads.",
|
||||
"anyOf": [
|
||||
|
|
|
|||