Add missing UV_TEST_NO_HTTP_RETRY_DELAY check and better logging (#16313)
The `install_http_retries` test goes from 15s to 0.3s. Additionally, we log the retry delay.
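Not part of the commit, but for context: a minimal, self-contained sketch of why zeroed retry bounds remove the delay. With `retry_bounds` set to zero, `reqwest-retry`'s `ExponentialBackoff` schedules every retry at effectively "now", so the sleep computed from `execute_after` collapses to zero. The `RetryPolicy` trait and `RetryDecision` imports are assumed from `reqwest-retry`'s re-exports.

```rust
use std::time::{Duration, SystemTime};

use reqwest_retry::{policies::ExponentialBackoff, RetryDecision, RetryPolicy};

fn main() {
    // Zero bounds collapse every backoff interval, mirroring the
    // UV_TEST_NO_HTTP_RETRY_DELAY branch added below.
    let policy = ExponentialBackoff::builder()
        .retry_bounds(Duration::from_millis(0), Duration::from_millis(0))
        .build_with_max_retries(3);

    if let RetryDecision::Retry { execute_after } = policy.should_retry(SystemTime::now(), 0) {
        // `execute_after` is effectively "now", so the computed sleep is ~0.
        let sleep = execute_after
            .duration_since(SystemTime::now())
            .unwrap_or_default();
        println!("would sleep for {sleep:?}");
    }
}
```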
parent 83635a6c45
commit 52cc3c8b94

4 changed files with 26 additions and 14 deletions
```diff
@@ -608,7 +608,11 @@ impl BaseClient {
     /// The [`RetryPolicy`] for the client.
     pub fn retry_policy(&self) -> ExponentialBackoff {
-        ExponentialBackoff::builder().build_with_max_retries(self.retries)
+        let mut builder = ExponentialBackoff::builder();
+        if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
+            builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
+        }
+        builder.build_with_max_retries(self.retries)
     }
 }
```
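A hedged sketch of how a test could opt in; this invocation is hypothetical and not taken from the uv test suite. The branch above only checks that the variable is present, so any value works:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Hypothetical: run uv with the test-only variable set in the child
    // environment so retries happen without backoff delay.
    let status = Command::new("uv")
        .args(["pip", "install", "anyio"])
        .env("UV_TEST_NO_HTTP_RETRY_DELAY", "1")
        .status()?;
    println!("exit status: {status}");
    Ok(())
}
```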
```diff
@@ -687,13 +687,15 @@ impl CachedClient {
                 let total_retries = past_retries + middleware_retries;
                 let retry_decision = retry_policy.should_retry(start_time, total_retries);
                 if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
-                    debug!(
-                        "Transient failure while handling response from {}; retrying...",
-                        req.url(),
-                    );
                     let duration = execute_after
                         .duration_since(SystemTime::now())
                         .unwrap_or_else(|_| Duration::default());
+                    debug!(
+                        "Transient failure while handling response from {}; retrying after {:.1}s...",
+                        req.url(),
+                        duration.as_secs_f32(),
+                    );
                     tokio::time::sleep(duration).await;
                     past_retries += 1;
                     continue;
```
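Note the formatting choice in this hunk versus the later ones: `{:.1}` with `as_secs_f32()` keeps one decimal place, while `as_secs()` truncates to whole seconds, so sub-second backoff delays log as `0s`. A standalone illustration:

```rust
use std::time::Duration;

fn main() {
    let delay = Duration::from_millis(340);
    // Whole seconds truncate: a 340ms backoff prints "0s".
    println!("retrying after {}s...", delay.as_secs());
    // One decimal keeps sub-second delays visible: "0.3s".
    println!("retrying after {:.1}s...", delay.as_secs_f32());
}
```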
```diff
@@ -745,13 +747,14 @@ impl CachedClient {
                 let total_retries = past_retries + middleware_retries;
                 let retry_decision = retry_policy.should_retry(start_time, total_retries);
                 if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
-                    debug!(
-                        "Transient failure while handling response from {}; retrying...",
-                        req.url(),
-                    );
                     let duration = execute_after
                         .duration_since(SystemTime::now())
                         .unwrap_or_else(|_| Duration::default());
+                    debug!(
+                        "Transient failure while handling response from {}; retrying after {}s...",
+                        req.url(),
+                        duration.as_secs(),
+                    );
                     tokio::time::sleep(duration).await;
                     past_retries += 1;
                     continue;
```
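In each hunk the log call moves below the `duration` binding because the delay has to be computed before it can be logged. The clamp matters too: `SystemTime::duration_since` returns `Err` once `execute_after` is already in the past, and `unwrap_or_else(|_| Duration::default())` turns that into a zero sleep. A small demonstration:

```rust
use std::time::{Duration, SystemTime};

fn main() {
    // A deadline that has already passed, as happens when the backoff
    // interval is zero (or the process was descheduled past it).
    let execute_after = SystemTime::now() - Duration::from_secs(1);
    let duration = execute_after
        .duration_since(SystemTime::now())
        .unwrap_or_else(|_| Duration::default());
    // The Err case is clamped to zero: no sleep for past deadlines.
    assert_eq!(duration, Duration::ZERO);
    println!("sleep: {duration:?}");
}
```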
```diff
@@ -409,11 +409,15 @@ pub async fn upload(
         if UvRetryableStrategy.handle(&result) == Some(Retryable::Transient) {
             let retry_decision = retry_policy.should_retry(start_time, n_past_retries);
             if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
-                warn_user!("Transient failure while handling response for {registry}; retrying...");
                 reporter.on_upload_complete(idx);
                 let duration = execute_after
                     .duration_since(SystemTime::now())
                     .unwrap_or_else(|_| Duration::default());
+                warn_user!(
+                    "Transient failure while handling response for {}; retrying after {}s...",
+                    registry,
+                    duration.as_secs()
+                );
                 tokio::time::sleep(duration).await;
                 n_past_retries += 1;
                 continue;
```
```diff
@@ -973,13 +973,14 @@ impl ManagedPythonDownload {
                     if let reqwest_retry::RetryDecision::Retry { execute_after } =
                         retry_decision
                     {
-                        debug!(
-                            "Transient failure while handling response for {}; retrying...",
-                            self.key()
-                        );
                         let duration = execute_after
                             .duration_since(SystemTime::now())
                             .unwrap_or_else(|_| Duration::default());
+                        debug!(
+                            "Transient failure while handling response for {}; retrying after {}s...",
+                            self.key(),
+                            duration.as_secs()
+                        );
                         tokio::time::sleep(duration).await;
                         retried_here = true;
                         continue; // Retry.
```