Apply Cache-Control overrides to response, not request headers (#14736)

## Summary

This was just an oversight on my part in the initial implementation.

Closes https://github.com/astral-sh/uv/issues/14719.

## Test Plan

With:

```toml
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13.2"
dependencies = [
]

[[tool.uv.index]]
url = "https://download.pytorch.org/whl/cpu"
cache-control = { api = "max-age=600" }
```

Ran `cargo run lock -vvv` and verified that the PyTorch index response
was cached (whereas it typically returns `cache-control:
no-cache,no-store,must-revalidate`).
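
For reference, a minimal standalone sketch of the mechanism this change relies on: instead of rewriting the *request's* `Cache-Control` header, the configured value is written onto the *response* before a cache policy is derived from it (in `fresh_request` and `send_cached_handle_stale`), so a server's `no-cache,no-store,must-revalidate` no longer wins. This sketch uses only the `http` crate; `apply_override` is a hypothetical helper for illustration, not uv's API.

```rust
use http::{HeaderValue, Response, header::CACHE_CONTROL};

/// Hypothetical helper: overwrite the response's `Cache-Control` header with a
/// user-configured value, mirroring the approach taken in this change.
fn apply_override(mut response: Response<()>, override_header: Option<&str>) -> Response<()> {
    if let Some(header) = override_header {
        if let Ok(value) = HeaderValue::from_str(header) {
            // The override is applied to the *response*, so any cache policy computed
            // from these headers reflects the configured value rather than whatever
            // the server sent.
            response.headers_mut().insert(CACHE_CONTROL, value);
        }
    }
    response
}

fn main() {
    // Simulate an index that forbids caching, like the PyTorch index in the test plan.
    let response = Response::builder()
        .header(CACHE_CONTROL, "no-cache,no-store,must-revalidate")
        .body(())
        .unwrap();

    // With `cache-control = { api = "max-age=600" }` configured, the override wins.
    let patched = apply_override(response, Some("max-age=600"));
    assert_eq!(
        patched.headers()[CACHE_CONTROL].to_str().unwrap(),
        "max-age=600"
    );
}
```
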
4 changed files with 201 additions and 50 deletions

```diff
@@ -304,7 +304,7 @@ impl CachedClient {
                 .await?
         } else {
             debug!("No cache entry for: {}", req.url());
-            let (response, cache_policy) = self.fresh_request(req).await?;
+            let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
             CachedResponse::ModifiedOrNew {
                 response,
                 cache_policy,
@@ -318,7 +318,12 @@ impl CachedClient {
                         "Broken fresh cache entry (for payload) at {}, removing: {err}",
                         cache_entry.path().display()
                     );
-                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
+                    self.resend_and_heal_cache(
+                        fresh_req,
+                        cache_entry,
+                        cache_control,
+                        response_callback,
+                    )
                     .await
                 }
             },
@@ -339,7 +344,12 @@ impl CachedClient {
                         (for payload) at {}, removing: {err}",
                         cache_entry.path().display()
                     );
-                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
+                    self.resend_and_heal_cache(
+                        fresh_req,
+                        cache_entry,
+                        cache_control,
+                        response_callback,
+                    )
                     .await
                 }
             }
@@ -355,7 +365,12 @@ impl CachedClient {
                 // ETag didn't match). We need to make a fresh request.
                 if response.status() == http::StatusCode::NOT_MODIFIED {
                     warn!("Server returned unusable 304 for: {}", fresh_req.url());
-                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
+                    self.resend_and_heal_cache(
+                        fresh_req,
+                        cache_entry,
+                        cache_control,
+                        response_callback,
+                    )
                     .await
                 } else {
                     self.run_response_callback(
@@ -379,9 +394,10 @@ impl CachedClient {
         &self,
         req: Request,
         cache_entry: &CacheEntry,
+        cache_control: CacheControl<'_>,
         response_callback: Callback,
     ) -> Result<Payload, CachedClientError<CallBackError>> {
-        let (response, cache_policy) = self.fresh_request(req).await?;
+        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
         let payload = self
             .run_response_callback(cache_entry, cache_policy, response, async |resp| {
@@ -401,10 +417,11 @@ impl CachedClient {
         &self,
         req: Request,
         cache_entry: &CacheEntry,
+        cache_control: CacheControl<'_>,
         response_callback: Callback,
     ) -> Result<Payload::Target, CachedClientError<CallBackError>> {
         let _ = fs_err::tokio::remove_file(&cache_entry.path()).await;
-        let (response, cache_policy) = self.fresh_request(req).await?;
+        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
         self.run_response_callback(cache_entry, cache_policy, response, response_callback)
             .await
     }
@@ -476,20 +493,13 @@ impl CachedClient {
     ) -> Result<CachedResponse, Error> {
         // Apply the cache control header, if necessary.
         match cache_control {
-            CacheControl::None | CacheControl::AllowStale => {}
+            CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) => {}
             CacheControl::MustRevalidate => {
                 req.headers_mut().insert(
                     http::header::CACHE_CONTROL,
                     http::HeaderValue::from_static("no-cache"),
                 );
             }
-            CacheControl::Override(value) => {
-                req.headers_mut().insert(
-                    http::header::CACHE_CONTROL,
-                    http::HeaderValue::from_str(value)
-                        .map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?,
-                );
-            }
         }
         Ok(match cached.cache_policy.before_request(&mut req) {
             BeforeRequest::Fresh => {
@@ -499,7 +509,12 @@ impl CachedClient {
             BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
                 CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
                     debug!("Found stale response for: {}", req.url());
-                    self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
+                    self.send_cached_handle_stale(
+                        req,
+                        cache_control,
+                        cached,
+                        new_cache_policy_builder,
+                    )
                     .await?
                 }
                 CacheControl::AllowStale => {
@@ -513,7 +528,7 @@ impl CachedClient {
                     "Cached request doesn't match current request for: {}",
                     req.url()
                 );
-                let (response, cache_policy) = self.fresh_request(req).await?;
+                let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
                 CachedResponse::ModifiedOrNew {
                     response,
                     cache_policy,
@@ -525,12 +540,13 @@ impl CachedClient {
     async fn send_cached_handle_stale(
         &self,
         req: Request,
+        cache_control: CacheControl<'_>,
        cached: DataWithCachePolicy,
         new_cache_policy_builder: CachePolicyBuilder,
     ) -> Result<CachedResponse, Error> {
         let url = DisplaySafeUrl::from(req.url().clone());
         debug!("Sending revalidation request for: {url}");
-        let response = self
+        let mut response = self
             .0
             .execute(req)
             .instrument(info_span!("revalidation_request", url = url.as_str()))
@@ -538,6 +554,16 @@ impl CachedClient {
             .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
             .error_for_status()
             .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
+
+        // If the user set a custom `Cache-Control` header, override it.
+        if let CacheControl::Override(header) = cache_control {
+            response.headers_mut().insert(
+                http::header::CACHE_CONTROL,
+                http::HeaderValue::from_str(header)
+                    .expect("Cache-Control header must be valid UTF-8"),
+            );
+        }
+
         match cached
             .cache_policy
             .after_response(new_cache_policy_builder, &response)
@@ -566,16 +592,26 @@ impl CachedClient {
     async fn fresh_request(
         &self,
         req: Request,
+        cache_control: CacheControl<'_>,
     ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
         let url = DisplaySafeUrl::from(req.url().clone());
         trace!("Sending fresh {} request for {}", req.method(), url);
         let cache_policy_builder = CachePolicyBuilder::new(&req);
-        let response = self
+        let mut response = self
             .0
             .execute(req)
             .await
             .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;
+
+        // If the user set a custom `Cache-Control` header, override it.
+        if let CacheControl::Override(header) = cache_control {
+            response.headers_mut().insert(
+                http::header::CACHE_CONTROL,
+                http::HeaderValue::from_str(header)
+                    .expect("Cache-Control header must be valid UTF-8"),
+            );
+        }
+
        let retry_count = response
             .extensions()
             .get::<reqwest_retry::RetryCount>()
@@ -690,6 +726,7 @@ impl CachedClient {
         &self,
         req: Request,
         cache_entry: &CacheEntry,
+        cache_control: CacheControl<'_>,
         response_callback: Callback,
     ) -> Result<Payload, CachedClientError<CallBackError>> {
         let mut past_retries = 0;
@@ -698,7 +735,7 @@ impl CachedClient {
         loop {
             let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
             let result = self
-                .skip_cache(fresh_req, cache_entry, &response_callback)
+                .skip_cache(fresh_req, cache_entry, cache_control, &response_callback)
                 .await;
             // Check if the middleware already performed retries
```

```diff
@@ -441,6 +441,26 @@ impl<'a> IndexLocations {
             }
         }
     }
+
+    /// Return the Simple API cache control header for an [`IndexUrl`], if configured.
+    pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.api.as_deref();
+            }
+        }
+        None
+    }
+
+    /// Return the artifact cache control header for an [`IndexUrl`], if configured.
+    pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.files.as_deref();
+            }
+        }
+        None
+    }
 }
 
 impl From<&IndexLocations> for uv_auth::Indexes {
```

```diff
@@ -20,7 +20,7 @@ use uv_client::{
 };
 use uv_distribution_filename::WheelFilename;
 use uv_distribution_types::{
-    BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, InstalledDist, Name, SourceDist,
+    BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, SourceDist,
 };
 use uv_extract::hash::Hasher;
 use uv_fs::write_atomic;
@@ -201,6 +201,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         match self
             .stream_wheel(
                 url.clone(),
+                dist.index(),
                 &wheel.filename,
                 wheel.file.size,
                 &wheel_entry,
@@ -236,6 +237,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 let archive = self
                     .download_wheel(
                         url,
+                        dist.index(),
                         &wheel.filename,
                         wheel.file.size,
                         &wheel_entry,
@@ -272,6 +274,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         match self
             .stream_wheel(
                 wheel.url.raw().clone(),
+                None,
                 &wheel.filename,
                 None,
                 &wheel_entry,
@@ -301,6 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 let archive = self
                     .download_wheel(
                         wheel.url.raw().clone(),
+                        None,
                         &wheel.filename,
                         None,
                         &wheel_entry,
@@ -534,6 +538,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
     async fn stream_wheel(
         &self,
         url: DisplaySafeUrl,
+        index: Option<&IndexUrl>,
         filename: &WheelFilename,
         size: Option<u64>,
         wheel_entry: &CacheEntry,
@@ -616,13 +621,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         // Fetch the archive from the cache, or download it if necessary.
         let req = self.request(url.clone())?;
 
+        // Determine the cache control policy for the URL.
         let cache_control = match self.client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&http_entry, Some(&filename.name), None)
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&http_entry, Some(&filename.name), None)
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
@@ -654,7 +670,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             .managed(async |client| {
                 client
                     .cached_client()
-                    .skip_cache_with_retry(self.request(url)?, &http_entry, download)
+                    .skip_cache_with_retry(
+                        self.request(url)?,
+                        &http_entry,
+                        cache_control,
+                        download,
+                    )
                     .await
                     .map_err(|err| match err {
                         CachedClientError::Callback { err, .. } => err,
@@ -671,6 +692,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
     async fn download_wheel(
         &self,
         url: DisplaySafeUrl,
+        index: Option<&IndexUrl>,
         filename: &WheelFilename,
         size: Option<u64>,
         wheel_entry: &CacheEntry,
@@ -783,13 +805,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         // Fetch the archive from the cache, or download it if necessary.
         let req = self.request(url.clone())?;
 
+        // Determine the cache control policy for the URL.
         let cache_control = match self.client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&http_entry, Some(&filename.name), None)
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&http_entry, Some(&filename.name), None)
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
@@ -821,7 +854,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             .managed(async |client| {
                 client
                     .cached_client()
-                    .skip_cache_with_retry(self.request(url)?, &http_entry, download)
+                    .skip_cache_with_retry(
+                        self.request(url)?,
+                        &http_entry,
+                        cache_control,
+                        download,
+                    )
                     .await
                     .map_err(|err| match err {
                         CachedClientError::Callback { err, .. } => err,
```

```diff
@@ -32,7 +32,7 @@ use uv_client::{
 use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution_filename::{SourceDistExtension, WheelFilename};
 use uv_distribution_types::{
-    BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl,
+    BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, IndexUrl, PathSourceUrl,
     SourceDist, SourceUrl,
 };
 use uv_extract::hash::Hasher;
@@ -148,6 +148,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url(
                     source,
                     &url,
+                    Some(&dist.index),
                     &cache_shard,
                     None,
                     dist.ext,
@@ -168,6 +169,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url(
                     source,
                     &dist.url,
+                    None,
                     &cache_shard,
                     dist.subdirectory.as_deref(),
                     dist.ext,
@@ -213,6 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url(
                     source,
                     resource.url,
+                    None,
                     &cache_shard,
                     resource.subdirectory,
                     resource.ext,
@@ -288,7 +291,16 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                         .await;
                 }
 
-                self.url_metadata(source, &url, &cache_shard, None, dist.ext, hashes, client)
+                self.url_metadata(
+                    source,
+                    &url,
+                    Some(&dist.index),
+                    &cache_shard,
+                    None,
+                    dist.ext,
+                    hashes,
+                    client,
+                )
                     .boxed_local()
                     .await?
             }
@@ -302,6 +314,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url_metadata(
                     source,
                     &dist.url,
+                    None,
                     &cache_shard,
                     dist.subdirectory.as_deref(),
                     dist.ext,
@@ -340,6 +353,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url_metadata(
                     source,
                     resource.url,
+                    None,
                     &cache_shard,
                     resource.subdirectory,
                     resource.ext,
@@ -395,6 +409,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'data>,
         url: &'data DisplaySafeUrl,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,
@@ -406,7 +421,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;
 
         // Before running the build, check that the hashes match.
@@ -448,6 +463,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     source,
                     ext,
                     url,
+                    index,
                     &source_dist_entry,
                     revision,
                     hashes,
@@ -511,6 +527,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'data>,
         url: &'data Url,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,
@@ -521,7 +538,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;
 
         // Before running the build, check that the hashes match.
@@ -578,6 +595,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     source,
                     ext,
                     url,
+                    index,
                     &source_dist_entry,
                     revision,
                     hashes,
@@ -689,18 +707,31 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         ext: SourceDistExtension,
         url: &Url,
+        index: Option<&IndexUrl>,
         cache_shard: &CacheShard,
         hashes: HashPolicy<'_>,
         client: &ManagedClient<'_>,
     ) -> Result<Revision, Error> {
         let cache_entry = cache_shard.entry(HTTP_REVISION);
 
+        // Determine the cache control policy for the request.
         let cache_control = match client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&cache_entry, source.name(), source.source_tree())
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
@@ -750,6 +781,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     .skip_cache_with_retry(
                         Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                         &cache_entry,
+                        cache_control,
                         download,
                     )
                     .await
@@ -2056,6 +2088,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         ext: SourceDistExtension,
         url: &Url,
+        index: Option<&IndexUrl>,
         entry: &CacheEntry,
         revision: Revision,
         hashes: HashPolicy<'_>,
@@ -2063,6 +2096,28 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
     ) -> Result<Revision, Error> {
         warn!("Re-downloading missing source distribution: {source}");
         let cache_entry = entry.shard().entry(HTTP_REVISION);
+
+        // Determine the cache control policy for the request.
+        let cache_control = match client.unmanaged.connectivity() {
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
+            Connectivity::Offline => CacheControl::AllowStale,
+        };
+
         let download = |response| {
             async {
                 // Take the union of the requested and existing hash algorithms.
@@ -2096,6 +2151,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     .skip_cache_with_retry(
                         Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                         &cache_entry,
+                        cache_control,
                         download,
                     )
                     .await
```