Bump MSRV to 1.85 and Edition 2024 (#13516)

## Summary

Builds on https://github.com/astral-sh/uv/pull/11724.

Closes https://github.com/astral-sh/uv/issues/13476.
Authored by Charlie Marsh on 2025-05-18 19:38:43 -04:00; committed by GitHub
parent cc6e766232
commit c5032aee80
240 changed files with 726 additions and 737 deletions

@@ -11,8 +11,8 @@ exclude = [
 resolver = "2"
 [workspace.package]
-edition = "2021"
-rust-version = "1.84"
+edition = "2024"
+rust-version = "1.85"
 homepage = "https://pypi.org/project/uv/"
 documentation = "https://pypi.org/project/uv/"
 repository = "https://github.com/astral-sh/uv"

@@ -1,7 +1,7 @@
 [package]
 name = "uv-auth"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 [lib]
 doctest = false

@@ -10,8 +10,8 @@ use url::Url;
 use uv_once_map::OnceMap;
-use crate::credentials::{Credentials, Username};
 use crate::Realm;
+use crate::credentials::{Credentials, Username};
 type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;
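Most of the import churn in this and the following hunks comes from rustfmt's 2024 style edition (enabled alongside Edition 2024), which sorts `use` items by ASCII value so uppercase names move ahead of lowercase ones. A minimal sketch, using a made-up `demo` module rather than uv's real dependencies:

```rust
// Illustrative only: `demo` stands in for any crate; the point is the ordering
// that `cargo fmt` produces under the 2024 style edition.
mod demo {
    pub struct Instrument;
    pub fn debug() {}
    pub fn instrument() {}
}

// The 2021 style ordered this list as `{debug, instrument, Instrument}`; the
// 2024 style edition sorts by ASCII value, so the uppercase `Instrument` leads.
use demo::{Instrument, debug, instrument};

fn main() {
    let _ = Instrument;
    debug();
    instrument();
}
```

These reorderings have no behavioral effect; they only keep the tree formatted under the new default.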

@@ -5,8 +5,8 @@ use std::borrow::Cow;
 use std::fmt;
 use netrc::Netrc;
-use reqwest::header::HeaderValue;
 use reqwest::Request;
+use reqwest::header::HeaderValue;
 use std::io::Read;
 use std::io::Write;
 use url::Url;

@@ -4,11 +4,11 @@ use http::{Extensions, StatusCode};
 use url::Url;
 use crate::{
+    CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
     cache::FetchUrl,
     credentials::{Credentials, Username},
     index::{AuthPolicy, Indexes},
     realm::Realm,
-    CredentialsCache, KeyringProvider, CREDENTIALS_CACHE,
 };
 use anyhow::{anyhow, format_err};
 use netrc::Netrc;
@@ -591,8 +591,8 @@ mod tests {
     use wiremock::matchers::{basic_auth, method, path_regex};
     use wiremock::{Mock, MockServer, ResponseTemplate};
-    use crate::credentials::Password;
     use crate::Index;
+    use crate::credentials::Password;
     use super::*;
@@ -1097,7 +1097,10 @@ mod tests {
         let mut url = base_url.clone();
         url.set_username("other_user").unwrap();
         assert!(
-            matches!(client.get(url).send().await, Err(reqwest_middleware::Error::Middleware(_))),
+            matches!(
+                client.get(url).send().await,
+                Err(reqwest_middleware::Error::Middleware(_))
+            ),
             "If the username does not match, a password should not be fetched, and the middleware should fail eagerly since `authenticate = always` is not satisfied"
         );
@@ -1614,8 +1617,8 @@ mod tests {
     /// credentials for _every_ request URL at the cost of inconsistent behavior when
     /// credentials are not scoped to a realm.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_mixed_authentication_in_realm_same_username(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_mixed_authentication_in_realm_same_username()
+    -> Result<(), Error> {
         let username = "user";
         let password_1 = "password1";
         let password_2 = "password2";
@@ -1714,8 +1717,8 @@ mod tests {
     /// where multiple URLs with the same username and realm share the same realm-level
     /// credentials cache entry.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_mixed_authentication_different_indexes_same_realm(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_mixed_authentication_different_indexes_same_realm()
+    -> Result<(), Error> {
         let username = "user";
         let password_1 = "password1";
         let password_2 = "password2";
@@ -1826,8 +1829,8 @@ mod tests {
     /// Demonstrates that when an index' credentials are cached for its realm, we
     /// find those credentials if they're not present in the keyring.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_shared_authentication_different_indexes_same_realm(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_shared_authentication_different_indexes_same_realm()
+    -> Result<(), Error> {
         let username = "user";
         let password = "password";

@@ -1,7 +1,7 @@
 use std::str::FromStr;
 use uv_bench::criterion::{
-    criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
+    BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
 };
 use uv_distribution_filename::WheelFilename;
 use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags};

@@ -1,7 +1,7 @@
 use std::str::FromStr;
 use uv_bench::criterion::black_box;
-use uv_bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion};
+use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
 use uv_cache::Cache;
 use uv_client::RegistryClientBuilder;
 use uv_distribution_types::Requirement;

@@ -4,7 +4,7 @@ mod settings;
 mod source_dist;
 mod wheel;
-pub use metadata::{check_direct_build, PyProjectToml};
+pub use metadata::{PyProjectToml, check_direct_build};
 pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

@@ -1,10 +1,10 @@
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::wheel::build_exclude_matcher;
 use crate::{
-    find_roots, BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
 };
-use flate2::write::GzEncoder;
 use flate2::Compression;
+use flate2::write::GzEncoder;
 use fs_err::File;
 use globset::{Glob, GlobSet};
 use std::io;

@@ -17,8 +17,8 @@ use uv_warnings::warn_user_once;
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::{
-    find_module_root, find_roots, BuildBackendSettings, DirectoryWriter, Error, FileList,
-    ListWriter, PyProjectToml,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
+    find_module_root, find_roots,
 };
 /// Build a wheel from the source tree and place it in the output directory.

@@ -19,13 +19,13 @@ use fs_err as fs;
 use indoc::formatdoc;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
-use serde::de::{value, IntoDeserializer, SeqAccess, Visitor};
-use serde::{de, Deserialize, Deserializer};
+use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
+use serde::{Deserialize, Deserializer, de};
 use tempfile::TempDir;
 use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{debug, info_span, instrument, Instrument};
+use tracing::{Instrument, debug, info_span, instrument};
 use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution::BuildRequires;

@@ -1,4 +1,4 @@
-use anyhow::{bail, Context, Result};
+use anyhow::{Context, Result, bail};
 use std::env;
 use std::io::Write;
 use std::path::PathBuf;

@@ -2,8 +2,8 @@ use std::borrow::Cow;
 use std::collections::{BTreeMap, BTreeSet};
 use std::hash::{Hash, Hasher};
 use std::num::{
-    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroU128, NonZeroU16,
-    NonZeroU32, NonZeroU64, NonZeroU8,
+    NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI128, NonZeroU8, NonZeroU16, NonZeroU32,
+    NonZeroU64, NonZeroU128,
 };
 use std::path::{Path, PathBuf};

@@ -11,7 +11,7 @@ use tracing::debug;
 pub use archive::ArchiveId;
 use uv_cache_info::Timestamp;
-use uv_fs::{cachedir, directories, LockedFile};
+use uv_fs::{LockedFile, cachedir, directories};
 use uv_normalize::PackageName;
 use uv_pypi_types::ResolutionMetadata;
@@ -19,7 +19,7 @@ pub use crate::by_timestamp::CachedByTimestamp;
 #[cfg(feature = "clap")]
 pub use crate::cli::CacheArgs;
 use crate::removal::Remover;
-pub use crate::removal::{rm_rf, Removal};
+pub use crate::removal::{Removal, rm_rf};
 pub use crate::wheel::WheelCache;
 use crate::wheel::WheelCacheKind;
@@ -1194,11 +1194,7 @@ impl Refresh {
     pub fn combine(self, other: Refresh) -> Self {
         /// Return the maximum of two timestamps.
         fn max(a: Timestamp, b: Timestamp) -> Timestamp {
-            if a > b {
-                a
-            } else {
-                b
-            }
+            if a > b { a } else { b }
         }
         match (self, other) {

@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 use url::Url;
-use uv_cache_key::{cache_digest, CanonicalUrl};
+use uv_cache_key::{CanonicalUrl, cache_digest};
 use uv_distribution_types::IndexUrl;
 /// Cache wheels and their metadata, both from remote wheels and built from source distributions.

@@ -1,4 +1,4 @@
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
 use clap::{Args, ValueEnum};
 use uv_warnings::warn_user;

@@ -3,9 +3,9 @@ use std::ops::{Deref, DerefMut};
 use std::path::PathBuf;
 use std::str::FromStr;
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
-use clap::builder::styling::{AnsiColor, Effects, Style};
 use clap::builder::Styles;
+use clap::builder::styling::{AnsiColor, Effects, Style};
 use clap::{Args, Parser, Subcommand};
 use url::Url;

@@ -3,7 +3,7 @@
 use std::fmt;
 use serde::Serialize;
-use uv_pep508::{uv_pep440::Version, PackageName};
+use uv_pep508::{PackageName, uv_pep440::Version};
 /// Information about the git repository where uv was built from.
 #[derive(Serialize)]

@@ -1,7 +1,7 @@
 [package]
 name = "uv-client"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 [lib]
 doctest = false

@@ -25,10 +25,10 @@ use uv_static::EnvVars;
 use uv_version::version;
 use uv_warnings::warn_user_once;
+use crate::Connectivity;
 use crate::linehaul::LineHaul;
 use crate::middleware::OfflineMiddleware;
 use crate::tls::read_identity;
-use crate::Connectivity;
 pub const DEFAULT_RETRIES: u32 = 3;

@@ -8,17 +8,17 @@ use reqwest_retry::RetryPolicy;
 use rkyv::util::AlignedVec;
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
-use tracing::{debug, info_span, instrument, trace, warn, Instrument};
+use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 use uv_cache::{CacheEntry, Freshness};
 use uv_fs::write_atomic;
-use crate::base_client::is_extended_transient_error;
 use crate::BaseClient;
+use crate::base_client::is_extended_transient_error;
 use crate::{
+    Error, ErrorKind,
     httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
     rkyvutil::OwnedArchive,
-    Error, ErrorKind,
 };
 /// A trait the generalizes (de)serialization at a high level.
@@ -230,7 +230,7 @@ impl CachedClient {
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
         let payload = self
-            .get_cacheable(req, cache_entry, cache_control, |resp| async {
+            .get_cacheable(req, cache_entry, cache_control, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
             })
@@ -359,7 +359,7 @@ impl CachedClient {
         let (response, cache_policy) = self.fresh_request(req).await?;
         let payload = self
-            .run_response_callback(cache_entry, cache_policy, response, move |resp| async {
+            .run_response_callback(cache_entry, cache_policy, response, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
            })
@@ -585,7 +585,7 @@ impl CachedClient {
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
         let payload = self
-            .get_cacheable_with_retry(req, cache_entry, cache_control, |resp| async {
+            .get_cacheable_with_retry(req, cache_entry, cache_control, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
             })
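The `|resp| async { ... }` to `async |resp| { ... }` rewrites in this file (and in several hunks below) use the async closure syntax stabilized in Rust 1.85, which the new MSRV makes available. A minimal sketch of the two forms, with hypothetical helper names rather than uv's actual callback API:

```rust
// Hypothetical stand-in for a response handler; uv's real callbacks
// deserialize HTTP responses and run under tokio.
async fn byte_len(payload: &str) -> usize {
    payload.len()
}

fn main() {
    // Pre-1.85 pattern, as the old code wrote it: a closure returning an async block.
    let old_style = |payload| async move { byte_len(payload).await };

    // Rust 1.85 async closure, as the new code writes it.
    let new_style = async |payload| byte_len(payload).await;

    // Both calls produce futures; hand them to an executor (tokio in uv's case)
    // to actually run them.
    let _old = old_style("cached response");
    let _new = new_style("cached response");
}
```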

@@ -10,7 +10,7 @@ use uv_normalize::PackageName;
 use uv_redacted::redacted_url;
 use crate::middleware::OfflineError;
-use crate::{html, FlatIndexError};
+use crate::{FlatIndexError, html};
 #[derive(Debug, thiserror::Error)]
 #[error(transparent)]
@@ -46,7 +46,7 @@ impl Error {
     /// Returns `true` if this error corresponds to an I/O "not found" error.
     pub(crate) fn is_file_not_exists(&self) -> bool {
-        let ErrorKind::Io(ref err) = &*self.kind else {
+        let ErrorKind::Io(err) = &*self.kind else {
             return false;
         };
         matches!(err.kind(), std::io::ErrorKind::NotFound)
@@ -246,7 +246,9 @@ pub enum ErrorKind {
     #[error("Writing to cache archive failed: {0}")]
     ArchiveWrite(String),
-    #[error("Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`")]
+    #[error(
+        "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`"
+    )]
     Offline(String),
 }
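The `ErrorKind::Io(ref err)` to `ErrorKind::Io(err)` change above is an Edition 2024 migration: when a pattern already matches through a reference, the 2024 edition rejects a redundant explicit `ref`, and the binding is a reference either way. A minimal sketch with a made-up enum (uv's real type is the `ErrorKind` in this file):

```rust
// Stand-in for the crate's error kind; only the shape matters here.
enum Kind {
    Io(std::io::Error),
    Other,
}

fn is_not_found(kind: &Kind) -> bool {
    // Matching on `&Kind` already binds the field by reference, so Edition 2024
    // rejects the older `Kind::Io(ref err)` spelling as redundant; `err` is
    // still an `&std::io::Error` here.
    let Kind::Io(err) = kind else {
        return false;
    };
    matches!(err.kind(), std::io::ErrorKind::NotFound)
}

fn main() {
    let missing = Kind::Io(std::io::Error::from(std::io::ErrorKind::NotFound));
    assert!(is_not_found(&missing));
    assert!(!is_not_found(&Kind::Other));
}
```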

@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 use futures::{FutureExt, StreamExt};
 use reqwest::Response;
-use tracing::{debug, info_span, warn, Instrument};
+use tracing::{Instrument, debug, info_span, warn};
 use url::Url;
 use uv_cache::{Cache, CacheBucket};
@@ -113,7 +113,7 @@ impl<'a> FlatIndexClient<'a> {
         indexes: impl Iterator<Item = &IndexUrl>,
     ) -> Result<FlatIndexEntries, FlatIndexError> {
         let mut fetches = futures::stream::iter(indexes)
-            .map(|index| async move {
+            .map(async |index| {
                 let entries = self.fetch_index(index).await?;
                 if entries.is_empty() {
                     warn!("No packages found in `--find-links` entry: {}", index);

@@ -1,6 +1,6 @@
 pub use base_client::{
-    is_extended_transient_error, AuthIntegration, BaseClient, BaseClientBuilder, ExtraMiddleware,
-    UvRetryableStrategy, DEFAULT_RETRIES,
+    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
+    UvRetryableStrategy, is_extended_transient_error,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};

@@ -20,7 +20,11 @@ impl OfflineError {
 impl std::fmt::Display for OfflineError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{}`", self.url)
+        write!(
+            f,
+            "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{}`",
+            self.url
+        )
     }
 }

@@ -13,7 +13,7 @@ use reqwest::{Proxy, Response};
 use reqwest_middleware::ClientWithMiddleware;
 use rustc_hash::FxHashMap;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{debug, info_span, instrument, trace, warn, Instrument};
+use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 use url::Url;
 use uv_auth::Indexes;
@@ -352,7 +352,9 @@ impl RegistryClient {
                     // The search failed because of an HTTP status code that we don't ignore for
                     // this index. We end our search here.
                     SimpleMetadataSearchOutcome::StatusCodeFailure(status_code) => {
-                        debug!("Indexes search failed because of status code failure: {status_code}");
+                        debug!(
+                            "Indexes search failed because of status code failure: {status_code}"
+                        );
                         break;
                     }
                 }
@@ -371,7 +373,7 @@ impl RegistryClient {
             // Otherwise, fetch concurrently.
             IndexStrategy::UnsafeBestMatch | IndexStrategy::UnsafeFirstMatch => {
                 results = futures::stream::iter(indexes)
-                    .map(|index| async move {
+                    .map(async |index| {
                         let _permit = download_concurrency.acquire().await;
                         match index.format {
                             IndexFormat::Simple => {
@@ -400,12 +402,10 @@
                            }
                        }
                    })
                    .buffered(8)
-                    .filter_map(|result: Result<_, Error>| async move {
-                        match result {
-                            Ok((index, Some(metadata))) => Some(Ok((index, metadata))),
-                            Ok((_, None)) => None,
-                            Err(err) => Some(Err(err)),
-                        }
-                    })
+                    .filter_map(async |result: Result<_, Error>| match result {
+                        Ok((index, Some(metadata))) => Some(Ok((index, metadata))),
+                        Ok((_, None)) => None,
+                        Err(err) => Some(Err(err)),
+                    })
                    .try_collect::<Vec<_>>()
                    .await?;
@@ -800,7 +800,7 @@ impl RegistryClient {
            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
        };
-        let response_callback = |response: Response| async {
+        let response_callback = async |response: Response| {
            let bytes = response
                .bytes()
                .await
@@ -987,11 +987,12 @@ impl RegistryClient {
            std::io::Error::new(
                std::io::ErrorKind::TimedOut,
                format!(
-                    "Failed to download distribution due to network timeout. Try increasing UV_HTTP_TIMEOUT (current value: {}s).", self.timeout().as_secs()
+                    "Failed to download distribution due to network timeout. Try increasing UV_HTTP_TIMEOUT (current value: {}s).",
+                    self.timeout().as_secs()
                ),
            )
        } else {
-            std::io::Error::new(std::io::ErrorKind::Other, err)
+            std::io::Error::other(err)
        }
    }
 }
@@ -1224,7 +1225,7 @@ mod tests {
     use uv_normalize::PackageName;
     use uv_pypi_types::{JoinRelativeError, SimpleJson};
-    use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};
+    use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};
     #[test]
     fn ignore_failing_files() {

@@ -12,12 +12,12 @@ serializing and deserializing.
 */
 use rkyv::{
+    Archive, Deserialize, Portable, Serialize,
     api::high::{HighDeserializer, HighSerializer, HighValidator},
     bytecheck::CheckBytes,
     rancor,
     ser::allocator::ArenaHandle,
     util::AlignedVec,
-    Archive, Deserialize, Portable, Serialize,
 };
 use crate::{Error, ErrorKind};

@@ -1,5 +1,5 @@
 use std::{
-    collections::{btree_map::Entry, BTreeMap},
+    collections::{BTreeMap, btree_map::Entry},
     str::FromStr,
 };
 use uv_cache_key::CacheKeyHasher;

@@ -1,6 +1,6 @@
 use std::{borrow::Cow, sync::Arc};
-use uv_normalize::{DefaultGroups, GroupName, DEV_DEPENDENCIES};
+use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, GroupName};
 /// Manager of all dependency-group decisions and settings history.
 ///

@@ -1,7 +1,7 @@
 //! Configure rayon and determine thread stack sizes.
-use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::LazyLock;
+use std::sync::atomic::{AtomicUsize, Ordering};
 use uv_static::EnvVars;
 /// The default minimum stack size for uv threads.

@@ -1,7 +1,7 @@
 [package]
 name = "uv-console"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 description = "Utilities for interacting with the terminal"
 [lib]

@@ -1,4 +1,4 @@
-use console::{measure_text_width, style, Key, Term};
+use console::{Key, Term, measure_text_width, style};
 use std::{cmp::Ordering, iter};
 /// Prompt the user for confirmation in the given [`Term`].

@@ -3,13 +3,13 @@ use std::cmp::max;
 use std::path::PathBuf;
 use anstream::println;
-use anyhow::{bail, Result};
+use anyhow::{Result, bail};
 use clap::{Command, CommandFactory};
 use itertools::Itertools;
 use pretty_assertions::StrComparison;
-use crate::generate_all::Mode;
 use crate::ROOT_DIR;
+use crate::generate_all::Mode;
 use uv_cli::Cli;
@@ -353,7 +353,7 @@ mod tests {
     use crate::generate_all::Mode;
-    use super::{main, Args};
+    use super::{Args, main};
     #[test]
     fn test_generate_cli_reference() -> Result<()> {

@@ -7,8 +7,8 @@ use std::path::PathBuf;
 use uv_static::EnvVars;
-use crate::generate_all::Mode;
 use crate::ROOT_DIR;
+use crate::generate_all::Mode;
 #[derive(clap::Args)]
 pub(crate) struct Args {
@@ -113,7 +113,7 @@ mod tests {
     use crate::generate_all::Mode;
-    use super::{main, Args};
+    use super::{Args, main};
     #[test]
     fn test_generate_env_vars_reference() -> Result<()> {

@@ -1,16 +1,16 @@
 use std::path::PathBuf;
 use anstream::println;
-use anyhow::{bail, Result};
+use anyhow::{Result, bail};
 use pretty_assertions::StrComparison;
-use schemars::{schema_for, JsonSchema};
+use schemars::{JsonSchema, schema_for};
 use serde::Deserialize;
 use uv_settings::Options as SettingsOptions;
 use uv_workspace::pyproject::ToolUv as WorkspaceOptions;
-use crate::generate_all::Mode;
 use crate::ROOT_DIR;
+use crate::generate_all::Mode;
 #[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
@@ -118,7 +118,7 @@ mod tests {
     use crate::generate_all::Mode;
-    use super::{main, Args};
+    use super::{Args, main};
     #[test]
     fn test_generate_json_schema() -> Result<()> {

@@ -5,7 +5,7 @@ use std::fmt::Write;
 use std::path::PathBuf;
 use anstream::println;
-use anyhow::{bail, Result};
+use anyhow::{Result, bail};
 use itertools::Itertools;
 use pretty_assertions::StrComparison;
 use schemars::JsonSchema;
@@ -16,8 +16,8 @@ use uv_options_metadata::{OptionField, OptionSet, OptionsMetadata, Visit};
 use uv_settings::Options as SettingsOptions;
 use uv_workspace::pyproject::ToolUv as WorkspaceOptions;
-use crate::generate_all::Mode;
 use crate::ROOT_DIR;
+use crate::generate_all::Mode;
 #[derive(Deserialize, JsonSchema, OptionsMetadata)]
 #[serde(deny_unknown_fields)]
@@ -398,7 +398,7 @@ mod tests {
     use crate::generate_all::Mode;
-    use super::{main, Args};
+    use super::{Args, main};
     #[test]
     fn test_generate_options_reference() -> Result<()> {

@@ -7,8 +7,8 @@ use std::time::Instant;
 use anstream::eprintln;
 use owo_colors::OwoColorize;
 use tracing::debug;
-use tracing_durations_export::plot::PlotConfig;
 use tracing_durations_export::DurationsLayerBuilder;
+use tracing_durations_export::plot::PlotConfig;
 use tracing_subscriber::filter::Directive;
 use tracing_subscriber::layer::SubscriberExt;
 use tracing_subscriber::util::SubscriberInitExt;

@@ -2,10 +2,10 @@
 use std::path::{Path, PathBuf};
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
 use clap::Parser;
 use poloto::build;
-use resvg::usvg_text_layout::{fontdb, TreeTextToPath};
+use resvg::usvg_text_layout::{TreeTextToPath, fontdb};
 use serde::Deserialize;
 use tagu::prelude::*;

@@ -1,7 +1,7 @@
 use std::str::FromStr;
 use anstream::println;
-use anyhow::{bail, Result};
+use anyhow::{Result, bail};
 use clap::Parser;
 use uv_cache::{Cache, CacheArgs};

@@ -86,11 +86,7 @@ pub fn legacy_user_state_dir() -> Option<PathBuf> {
 /// Return a [`PathBuf`] if the given [`OsString`] is an absolute path.
 fn parse_path(path: OsString) -> Option<PathBuf> {
     let path = PathBuf::from(path);
-    if path.is_absolute() {
-        Some(path)
-    } else {
-        None
-    }
+    if path.is_absolute() { Some(path) } else { None }
 }
 /// Returns the path to the user configuration directory.

@@ -226,11 +226,13 @@ mod tests {
     #[test]
     fn name_too_long() {
-        assert!(SourceDistFilename::parse(
-            "foo.zip",
-            SourceDistExtension::Zip,
-            &PackageName::from_str("foo-lib").unwrap()
-        )
-        .is_err());
+        assert!(
+            SourceDistFilename::parse(
+                "foo.zip",
+                SourceDistExtension::Zip,
+                &PackageName::from_str("foo-lib").unwrap()
+            )
+            .is_err()
+        );
     }
 }

@@ -3,7 +3,7 @@ use std::hash::Hash;
 use std::str::FromStr;
 use memchr::memchr;
-use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use thiserror::Error;
 use url::Url;

@@ -12,6 +12,7 @@ use crate::{
 /// A built distribution (wheel) that exists in the local cache.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
+#[allow(clippy::large_enum_variant)]
 pub enum CachedDist {
     /// The distribution exists in a registry, like `PyPI`.
     Registry(CachedRegistryDist),

@@ -1,8 +1,8 @@
 use std::collections::VecDeque;
 use std::fmt::{Debug, Display, Formatter};
-use petgraph::prelude::EdgeRef;
 use petgraph::Direction;
+use petgraph::prelude::EdgeRef;
 use rustc_hash::FxHashSet;
 use version_ranges::Ranges;

@@ -10,7 +10,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use thiserror::Error;
 use url::{ParseError, Url};
-use uv_pep508::{split_scheme, Scheme, VerbatimUrl, VerbatimUrlError};
+use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme};
 use crate::{Index, IndexStatusCodeStrategy, Verbatim};

@@ -7,12 +7,12 @@ use thiserror::Error;
 use url::Url;
 use uv_distribution_filename::DistExtension;
-use uv_fs::{relative_to, PortablePath, PortablePathBuf, CWD};
+use uv_fs::{CWD, PortablePath, PortablePathBuf, relative_to};
 use uv_git_types::{GitOid, GitReference, GitUrl, GitUrlParseError, OidParseError};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::VersionSpecifiers;
 use uv_pep508::{
-    marker, MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl,
+    MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl, marker,
 };
 use crate::{IndexMetadata, IndexUrl};

@@ -1,4 +1,4 @@
-use uv_cache::{ArchiveId, Cache, ARCHIVE_VERSION};
+use uv_cache::{ARCHIVE_VERSION, ArchiveId, Cache};
 use uv_distribution_filename::WheelFilename;
 use uv_distribution_types::Hashed;
 use uv_pypi_types::{HashDigest, HashDigests};

@@ -10,7 +10,7 @@ use tempfile::TempDir;
 use tokio::io::{AsyncRead, AsyncSeekExt, ReadBuf};
 use tokio::sync::Semaphore;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
-use tracing::{info_span, instrument, warn, Instrument};
+use tracing::{Instrument, info_span, instrument, warn};
 use url::Url;
 use uv_cache::{ArchiveId, CacheBucket, CacheEntry, WheelCache};
@@ -97,7 +97,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 ),
             )
         } else {
-            io::Error::new(io::ErrorKind::Other, err)
+            io::Error::other(err)
         }
     }
@@ -647,7 +647,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             archive
         } else {
             self.client
-                .managed(|client| async {
+                .managed(async |client| {
                     client
                         .cached_client()
                         .skip_cache_with_retry(self.request(url)?, &http_entry, download)
@@ -814,7 +814,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             archive
         } else {
             self.client
-                .managed(|client| async {
+                .managed(async |client| {
                     client
                         .cached_client()
                         .skip_cache_with_retry(self.request(url)?, &http_entry, download)

@@ -8,9 +8,9 @@ use uv_distribution_types::{
 use uv_platform_tags::Tags;
 use uv_types::HashStrategy;
-use crate::index::cached_wheel::CachedWheel;
-use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
 use crate::Error;
+use crate::index::cached_wheel::CachedWheel;
+use crate::source::{HTTP_REVISION, HttpRevisionPointer, LOCAL_REVISION, LocalRevisionPointer};
 /// A local index of built distributions for a specific source distribution.
 #[derive(Debug)]

@@ -12,7 +12,7 @@ use uv_platform_tags::Tags;
 use uv_types::HashStrategy;
 use crate::index::cached_wheel::CachedWheel;
-use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
+use crate::source::{HTTP_REVISION, HttpRevisionPointer, LOCAL_REVISION, LocalRevisionPointer};
 /// An entry in the [`RegistryWheelIndex`].
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
@@ -64,7 +64,7 @@ impl<'a> RegistryWheelIndex<'a> {
     /// Get an entry in the index.
     fn get_impl(&mut self, name: &'a PackageName) -> &[IndexEntry] {
-        let versions = match self.index.entry(name) {
+        (match self.index.entry(name) {
             Entry::Occupied(entry) => entry.into_mut(),
             Entry::Vacant(entry) => entry.insert(Self::index(
                 name,
@@ -74,8 +74,7 @@ impl<'a> RegistryWheelIndex<'a> {
                 self.hasher,
                 self.build_configuration,
             )),
-        };
-        versions
+        }) as _
     }
     /// Add a package to the index by reading from the cache.

@@ -13,10 +13,10 @@ use uv_distribution_types::{
 use uv_git_types::{GitReference, GitUrl, GitUrlParseError};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::VersionSpecifiers;
-use uv_pep508::{looks_like_git_repository, MarkerTree, VerbatimUrl, VersionOrUrl};
+use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository};
 use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl};
-use uv_workspace::pyproject::{PyProjectToml, Source, Sources};
 use uv_workspace::Workspace;
+use uv_workspace::pyproject::{PyProjectToml, Source, Sources};
 use crate::metadata::GitWorkspaceMember;
@@ -285,8 +285,7 @@ impl LoweredRequirement {
                     // relative to main workspace: `../current_workspace/packages/current_project`
                     let url = VerbatimUrl::from_absolute_path(member.root())?;
                     let install_path = url.to_file_path().map_err(|()| {
-                        LoweringError::RelativeTo(io::Error::new(
-                            io::ErrorKind::Other,
+                        LoweringError::RelativeTo(io::Error::other(
                             "Invalid path in file URL",
                         ))
                    })?;
@@ -689,12 +688,9 @@ fn path_source(
        RequirementOrigin::Workspace => workspace_root,
    };
    let url = VerbatimUrl::from_path(path, base)?.with_given(path.to_string_lossy());
-    let install_path = url.to_file_path().map_err(|()| {
-        LoweringError::RelativeTo(io::Error::new(
-            io::ErrorKind::Other,
-            "Invalid path in file URL",
-        ))
-    })?;
+    let install_path = url
+        .to_file_path()
+        .map_err(|()| LoweringError::RelativeTo(io::Error::other("Invalid path in file URL")))?;
    let is_dir = if let Ok(metadata) = install_path.metadata() {
        metadata.is_dir()

@@ -6,14 +6,14 @@ use rustc_hash::FxHashSet;
 use uv_configuration::SourceStrategy;
 use uv_distribution_types::{IndexLocations, Requirement};
-use uv_normalize::{ExtraName, GroupName, PackageName, DEV_DEPENDENCIES};
+use uv_normalize::{DEV_DEPENDENCIES, ExtraName, GroupName, PackageName};
 use uv_pep508::MarkerTree;
 use uv_workspace::dependency_groups::FlatDependencyGroups;
 use uv_workspace::pyproject::{Sources, ToolUvSources};
 use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
-use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
 use crate::Metadata;
+use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
 #[derive(Debug, Clone)]
 pub struct RequiresDist {
@@ -466,8 +466,8 @@ mod test {
     use uv_workspace::pyproject::PyProjectToml;
     use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};
-    use crate::metadata::requires_dist::FlatRequiresDist;
     use crate::RequiresDist;
+    use crate::metadata::requires_dist::FlatRequiresDist;
     async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
         let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;

@@ -18,7 +18,7 @@ use fs_err::tokio as fs;
 use futures::{FutureExt, TryStreamExt};
 use reqwest::{Response, StatusCode};
 use tokio_util::compat::FuturesAsyncReadCompatExt;
-use tracing::{debug, info_span, instrument, warn, Instrument};
+use tracing::{Instrument, debug, info_span, instrument, warn};
 use url::Url;
 use zip::ZipArchive;
@@ -39,7 +39,7 @@ use uv_fs::{rename_with_retry, write_atomic};
 use uv_git_types::{GitHubRepository, GitOid};
 use uv_metadata::read_archive_metadata;
 use uv_normalize::PackageName;
-use uv_pep440::{release_specifiers_to_ranges, Version};
+use uv_pep440::{Version, release_specifiers_to_ranges};
 use uv_platform_tags::Tags;
 use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata};
 use uv_types::{BuildContext, BuildStack, SourceBuildTrait};
@@ -736,7 +736,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             Ok(revision)
         } else {
             client
-                .managed(|client| async move {
+                .managed(async |client| {
                     client
                         .cached_client()
                         .skip_cache_with_retry(
@@ -1925,7 +1925,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         debug!("Attempting to fetch `pyproject.toml` from: {url}");
         let content = client
-            .managed(|client| async {
+            .managed(async |client| {
                 let response = client
                     .uncached_client(git.repository())
                     .get(&url)
@@ -2073,7 +2073,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .instrument(info_span!("download", source_dist = %source))
         };
         client
-            .managed(|client| async move {
+            .managed(async |client| {
                 client
                     .cached_client()
                     .skip_cache_with_retry(
@@ -2107,7 +2107,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .map_err(Error::CacheWrite)?;
         let reader = response
             .bytes_stream()
-            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
+            .map_err(std::io::Error::other)
             .into_async_read();
         // Create a hasher for each hash algorithm.

@@ -1,8 +1,8 @@
 use std::path::{Path, PathBuf};
 use std::sync::{LazyLock, Mutex};
-use crate::vendor::{CloneableSeekableReader, HasLength};
 use crate::Error;
+use crate::vendor::{CloneableSeekableReader, HasLength};
 use rayon::prelude::*;
 use rustc_hash::FxHashSet;
 use tracing::warn;

@@ -255,7 +255,7 @@ pub async fn rename_with_retry(
     let from = from.as_ref();
     let to = to.as_ref();
-    let rename = || async { fs_err::rename(from, to) };
+    let rename = async || fs_err::rename(from, to);
     rename
         .retry(backoff_file_move())
@@ -312,16 +312,13 @@ pub fn with_retry_sync(
        })
        .call()
        .map_err(|err| {
-            std::io::Error::new(
-                std::io::ErrorKind::Other,
-                format!(
-                    "Failed {} {} to {}: {}",
-                    operation_name,
-                    from.display(),
-                    to.display(),
-                    err
-                ),
-            )
+            std::io::Error::other(format!(
+                "Failed {} {} to {}: {}",
+                operation_name,
+                from.display(),
+                to.display(),
+                err
+            ))
        })
 }
 #[cfg(not(windows))]
@@ -417,21 +414,15 @@ pub async fn persist_with_retry(
    match persisted {
        Ok(_) => Ok(()),
-        Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!(
-                "Failed to persist temporary file to {}: {}",
-                to.display(),
-                error_message,
-            ),
-        )),
-        Err(PersistRetryError::LostState) => Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!(
-                "Failed to retrieve temporary file while trying to persist to {}",
-                to.display()
-            ),
-        )),
+        Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::other(format!(
+            "Failed to persist temporary file to {}: {}",
+            to.display(),
+            error_message,
+        ))),
+        Err(PersistRetryError::LostState) => Err(std::io::Error::other(format!(
+            "Failed to retrieve temporary file while trying to persist to {}",
+            to.display()
+        ))),
    }
 }
 #[cfg(not(windows))]
@@ -491,21 +482,15 @@ pub fn persist_with_retry_sync(
    match persisted {
        Ok(_) => Ok(()),
-        Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!(
-                "Failed to persist temporary file to {}: {}",
-                to.display(),
-                error_message,
-            ),
-        )),
-        Err(PersistRetryError::LostState) => Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!(
-                "Failed to retrieve temporary file while trying to persist to {}",
-                to.display()
-            ),
-        )),
+        Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::other(format!(
+            "Failed to persist temporary file to {}: {}",
+            to.display(),
+            error_message,
+        ))),
+        Err(PersistRetryError::LostState) => Err(std::io::Error::other(format!(
+            "Failed to retrieve temporary file while trying to persist to {}",
+            to.display()
+        ))),
    }
 }
 #[cfg(not(windows))]
@@ -617,14 +602,11 @@ impl LockedFile {
        );
        file.file().lock_exclusive().map_err(|err| {
            // Not an fs_err method, we need to build our own path context
-            std::io::Error::new(
-                std::io::ErrorKind::Other,
-                format!(
-                    "Could not acquire lock for `{resource}` at `{}`: {}",
-                    file.path().user_display(),
-                    err
-                ),
-            )
+            std::io::Error::other(format!(
+                "Could not acquire lock for `{resource}` at `{}`: {}",
+                file.path().user_display(),
+                err
+            ))
        })?;
        debug!("Acquired lock for `{resource}`");

@@ -319,14 +319,11 @@ pub fn relative_to(
                .map(|stripped| (stripped, ancestor))
        })
        .ok_or_else(|| {
-            std::io::Error::new(
-                std::io::ErrorKind::Other,
-                format!(
-                    "Trivial strip failed: {} vs. {}",
-                    path.simplified_display(),
-                    base.simplified_display()
-                ),
-            )
+            std::io::Error::other(format!(
+                "Trivial strip failed: {} vs. {}",
+                path.simplified_display(),
+                base.simplified_display()
+            ))
        })?;
    // go as many levels up as required

@@ -8,7 +8,7 @@ use std::str::{self};
 use std::sync::LazyLock;
 use anyhow::{Context, Result};
-use cargo_util::{paths, ProcessBuilder};
+use cargo_util::{ProcessBuilder, paths};
 use reqwest::StatusCode;
 use reqwest_middleware::ClientWithMiddleware;
 use tracing::{debug, warn};

@@ -1,4 +1,4 @@
-pub use crate::credentials::{store_credentials_from_url, GIT_STORE};
+pub use crate::credentials::{GIT_STORE, store_credentials_from_url};
 pub use crate::git::GIT;
 pub use crate::resolver::{
     GitResolver, GitResolverError, RepositoryReference, ResolvedRepositoryReference,

@@ -3,13 +3,13 @@ use std::path::PathBuf;
 use std::str::FromStr;
 use std::sync::Arc;
-use dashmap::mapref::one::Ref;
 use dashmap::DashMap;
+use dashmap::mapref::one::Ref;
 use fs_err::tokio as fs;
 use reqwest_middleware::ClientWithMiddleware;
 use tracing::debug;
-use uv_cache_key::{cache_digest, RepositoryUrl};
+use uv_cache_key::{RepositoryUrl, cache_digest};
 use uv_fs::LockedFile;
 use uv_git_types::{GitHubRepository, GitOid, GitReference, GitUrl};
 use uv_version::version;

@@ -11,12 +11,12 @@ use reqwest_middleware::ClientWithMiddleware;
 use tracing::{debug, instrument};
 use url::Url;
-use uv_cache_key::{cache_digest, RepositoryUrl};
+use uv_cache_key::{RepositoryUrl, cache_digest};
 use uv_git_types::GitUrl;
 use uv_redacted::redacted_url;
-use crate::git::GitRemote;
 use crate::GIT_STORE;
+use crate::git::GitRemote;
 /// A remote Git source that can be checked out locally.
 pub struct GitSource {

@@ -1,7 +1,7 @@
 use globset::{Glob, GlobSet, GlobSetBuilder};
 use regex_automata::dfa;
 use regex_automata::dfa::Automaton;
-use std::path::{Path, MAIN_SEPARATOR, MAIN_SEPARATOR_STR};
+use std::path::{MAIN_SEPARATOR, MAIN_SEPARATOR_STR, Path};
 use tracing::warn;
 /// Chosen at a whim -Konsti
@@ -32,14 +32,13 @@ impl GlobDirFilter {
            .iter()
            .map(|glob| {
                let main_separator = regex::escape(MAIN_SEPARATOR_STR);
-                let regex = glob
-                    .regex()
+                glob.regex()
                    // We are using a custom DFA builder
                    .strip_prefix("(?-u)")
                    .expect("a glob is a non-unicode byte regex")
                    // Match windows paths if applicable
-                    .replace('/', &main_separator);
-                regex
+                    .replace('/', &main_separator)
            })
            .collect();
@@ -123,9 +122,9 @@ impl GlobDirFilter {
 #[cfg(test)]
 mod tests {
-    use crate::glob_dir_filter::GlobDirFilter;
     use crate::PortableGlobParser;
+    use crate::glob_dir_filter::GlobDirFilter;
-    use std::path::{Path, MAIN_SEPARATOR};
+    use std::path::{MAIN_SEPARATOR, Path};
    use tempfile::tempdir;
    use walkdir::WalkDir;

@@ -15,8 +15,8 @@ use uv_pypi_types::{DirectUrl, Metadata10};
 use crate::linker::{LinkMode, Locks};
 use crate::wheel::{
-    dist_info_metadata, find_dist_info, install_data, parse_scripts, parse_wheel_file,
-    read_record_file, write_installer_metadata, write_script_entrypoints, LibKind,
+    LibKind, dist_info_metadata, find_dist_info, install_data, parse_scripts, parse_wheel_file,
+    read_record_file, write_installer_metadata, write_script_entrypoints,
 };
 use crate::{Error, Layout};

@@ -12,8 +12,8 @@ use uv_pypi_types::Scheme;
 pub use install::install_wheel;
 pub use linker::{LinkMode, Locks};
-pub use uninstall::{uninstall_egg, uninstall_legacy_editable, uninstall_wheel, Uninstall};
-pub use wheel::{parse_wheel_file, read_record_file, LibKind};
+pub use uninstall::{Uninstall, uninstall_egg, uninstall_legacy_editable, uninstall_wheel};
+pub use wheel::{LibKind, parse_wheel_file, read_record_file};
 mod install;
 mod linker;

@@ -4,7 +4,7 @@ use rustc_hash::FxHashSet;
 use serde::Serialize;
 use std::sync::LazyLock;
-use crate::{wheel, Error};
+use crate::{Error, wheel};
 /// A script defining the name of the runnable entrypoint and the module and function that should be
 /// run.
@@ -109,7 +109,7 @@ pub(crate) fn scripts_from_ini(
 #[cfg(test)]
 mod test {
-    use crate::script::{scripts_from_ini, Script};
+    use crate::script::{Script, scripts_from_ini};
     #[test]
     fn test_valid_script_names() {

@@ -6,8 +6,8 @@ use std::sync::{LazyLock, Mutex};
 use tracing::trace;
 use uv_fs::write_atomic_sync;
-use crate::wheel::read_record_file;
 use crate::Error;
+use crate::wheel::read_record_file;
 /// Uninstall the wheel represented by the given `.dist-info` directory.
 pub fn uninstall_wheel(dist_info: &Path) -> Result<Uninstall, Error> {
@@ -261,11 +261,7 @@ pub fn uninstall_legacy_editable(egg_link: &Path) -> Result<Uninstall, Error> {
        .lines()
        .find_map(|line| {
            let line = line.trim();
-            if line.is_empty() {
-                None
-            } else {
-                Some(line)
-            }
+            if line.is_empty() { None } else { Some(line) }
        })
        .ok_or_else(|| Error::InvalidEggLink(egg_link.to_path_buf()))?;

View file

@ -13,7 +13,7 @@ use tracing::{debug, instrument, trace, warn};
use walkdir::WalkDir; use walkdir::WalkDir;
use uv_cache_info::CacheInfo; use uv_cache_info::CacheInfo;
use uv_fs::{persist_with_retry_sync, relative_to, Simplified}; use uv_fs::{Simplified, persist_with_retry_sync, relative_to};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_pypi_types::DirectUrl; use uv_pypi_types::DirectUrl;
use uv_shell::escape_posix_for_single_quotes; use uv_shell::escape_posix_for_single_quotes;
@ -21,7 +21,7 @@ use uv_trampoline_builder::windows_script_launcher;
use uv_warnings::warn_user_once; use uv_warnings::warn_user_once;
use crate::record::RecordEntry; use crate::record::RecordEntry;
use crate::script::{scripts_from_ini, Script}; use crate::script::{Script, scripts_from_ini};
use crate::{Error, Layout}; use crate::{Error, Layout};
/// Wrapper script template function /// Wrapper script template function
@ -210,13 +210,10 @@ pub(crate) fn write_script_entrypoints(
let entrypoint_relative = pathdiff::diff_paths(&entrypoint_absolute, site_packages) let entrypoint_relative = pathdiff::diff_paths(&entrypoint_absolute, site_packages)
.ok_or_else(|| { .ok_or_else(|| {
Error::Io(io::Error::new( Error::Io(io::Error::other(format!(
io::ErrorKind::Other, "Could not find relative path for: {}",
format!( entrypoint_absolute.simplified_display()
"Could not find relative path for: {}", )))
entrypoint_absolute.simplified_display()
),
))
})?; })?;
// Generate the launcher script. // Generate the launcher script.
@ -407,13 +404,10 @@ fn install_script(
let script_absolute = layout.scheme.scripts.join(file.file_name()); let script_absolute = layout.scheme.scripts.join(file.file_name());
let script_relative = let script_relative =
pathdiff::diff_paths(&script_absolute, site_packages).ok_or_else(|| { pathdiff::diff_paths(&script_absolute, site_packages).ok_or_else(|| {
Error::Io(io::Error::new( Error::Io(io::Error::other(format!(
io::ErrorKind::Other, "Could not find relative path for: {}",
format!( script_absolute.simplified_display()
"Could not find relative path for: {}", )))
script_absolute.simplified_display()
),
))
})?; })?;
let path = file.path(); let path = file.path();
@ -723,13 +717,10 @@ pub(crate) fn get_relocatable_executable(
) -> Result<PathBuf, Error> { ) -> Result<PathBuf, Error> {
Ok(if relocatable { Ok(if relocatable {
pathdiff::diff_paths(&executable, &layout.scheme.scripts).ok_or_else(|| { pathdiff::diff_paths(&executable, &layout.scheme.scripts).ok_or_else(|| {
Error::Io(io::Error::new( Error::Io(io::Error::other(format!(
io::ErrorKind::Other, "Could not find relative path for: {}",
format!( executable.simplified_display()
"Could not find relative path for: {}", )))
executable.simplified_display()
),
))
})? })?
} else { } else {
executable executable
@ -896,12 +887,12 @@ mod test {
use assert_fs::prelude::*; use assert_fs::prelude::*;
use indoc::{formatdoc, indoc}; use indoc::{formatdoc, indoc};
use crate::wheel::format_shebang;
use crate::Error; use crate::Error;
use crate::wheel::format_shebang;
use super::{ use super::{
get_script_executable, parse_email_message_file, parse_wheel_file, read_record_file, RecordEntry, Script, get_script_executable, parse_email_message_file, parse_wheel_file,
write_installer_metadata, RecordEntry, Script, read_record_file, write_installer_metadata,
}; };
#[test] #[test]
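
Several hunks in this file swap `io::Error::new(io::ErrorKind::Other, ...)` for the shorter `io::Error::other(...)` constructor, stable since Rust 1.74, which accepts anything convertible into a boxed error. A minimal sketch of the two equivalent spellings; the `msg` parameter and function names are illustrative, not uv code:

```rust
use std::io;

// Older spelling: name the ErrorKind explicitly.
fn old_style(msg: String) -> io::Error {
    io::Error::new(io::ErrorKind::Other, msg)
}

// Newer spelling used by this commit: io::Error::other takes any value
// convertible into Box<dyn Error + Send + Sync>.
fn new_style(msg: String) -> io::Error {
    io::Error::other(msg)
}

fn main() {
    // Both construct an error with ErrorKind::Other.
    assert_eq!(old_style("boom".into()).kind(), new_style("boom".into()).kind());
}
```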

View file

@ -17,8 +17,10 @@ pub struct Installer<'a> {
link_mode: LinkMode, link_mode: LinkMode,
cache: Option<&'a Cache>, cache: Option<&'a Cache>,
reporter: Option<Arc<dyn Reporter>>, reporter: Option<Arc<dyn Reporter>>,
installer_name: Option<String>, /// The name of the [`Installer`].
installer_metadata: bool, name: Option<String>,
/// The metadata associated with the [`Installer`].
metadata: bool,
} }
impl<'a> Installer<'a> { impl<'a> Installer<'a> {
@ -29,8 +31,8 @@ impl<'a> Installer<'a> {
link_mode: LinkMode::default(), link_mode: LinkMode::default(),
cache: None, cache: None,
reporter: None, reporter: None,
installer_name: Some("uv".to_string()), name: Some("uv".to_string()),
installer_metadata: true, metadata: true,
} }
} }
@ -62,7 +64,7 @@ impl<'a> Installer<'a> {
#[must_use] #[must_use]
pub fn with_installer_name(self, installer_name: Option<String>) -> Self { pub fn with_installer_name(self, installer_name: Option<String>) -> Self {
Self { Self {
installer_name, name: installer_name,
..self ..self
} }
} }
@ -71,7 +73,7 @@ impl<'a> Installer<'a> {
#[must_use] #[must_use]
pub fn with_installer_metadata(self, installer_metadata: bool) -> Self { pub fn with_installer_metadata(self, installer_metadata: bool) -> Self {
Self { Self {
installer_metadata, metadata: installer_metadata,
..self ..self
} }
} }
@ -84,8 +86,8 @@ impl<'a> Installer<'a> {
cache, cache,
link_mode, link_mode,
reporter, reporter,
installer_name, name: installer_name,
installer_metadata, metadata: installer_metadata,
} = self; } = self;
if cache.is_some_and(Cache::is_temporary) { if cache.is_some_and(Cache::is_temporary) {
@ -136,11 +138,11 @@ impl<'a> Installer<'a> {
install( install(
wheels, wheels,
self.venv.interpreter().layout(), self.venv.interpreter().layout(),
self.installer_name, self.name,
self.link_mode, self.link_mode,
self.reporter, self.reporter,
self.venv.relocatable(), self.venv.relocatable(),
self.installer_metadata, self.metadata,
) )
} }
} }

View file

@ -1,9 +1,9 @@
pub use compile::{compile_tree, CompileError}; pub use compile::{CompileError, compile_tree};
pub use installer::{Installer, Reporter as InstallReporter}; pub use installer::{Installer, Reporter as InstallReporter};
pub use plan::{Plan, Planner}; pub use plan::{Plan, Planner};
pub use preparer::{Error as PrepareError, Preparer, Reporter as PrepareReporter}; pub use preparer::{Error as PrepareError, Preparer, Reporter as PrepareReporter};
pub use site_packages::{SatisfiesResult, SitePackages, SitePackagesDiagnostic}; pub use site_packages::{SatisfiesResult, SitePackages, SitePackagesDiagnostic};
pub use uninstall::{uninstall, UninstallError}; pub use uninstall::{UninstallError, uninstall};
mod compile; mod compile;
mod preparer; mod preparer;

View file

@ -1,4 +1,4 @@
use anyhow::{bail, Result}; use anyhow::{Result, bail};
use std::sync::Arc; use std::sync::Arc;
use tracing::{debug, warn}; use tracing::{debug, warn};
@ -18,8 +18,8 @@ use uv_pypi_types::VerbatimParsedUrl;
use uv_python::PythonEnvironment; use uv_python::PythonEnvironment;
use uv_types::HashStrategy; use uv_types::HashStrategy;
use crate::satisfies::RequirementSatisfaction;
use crate::SitePackages; use crate::SitePackages;
use crate::satisfies::RequirementSatisfaction;
/// A planner to generate an [`Plan`] based on a set of requirements. /// A planner to generate an [`Plan`] based on a set of requirements.
#[derive(Debug)] #[derive(Debug)]

View file

@ -1,7 +1,7 @@
use std::cmp::Reverse; use std::cmp::Reverse;
use std::sync::Arc; use std::sync::Arc;
use futures::{stream::FuturesUnordered, FutureExt, Stream, TryFutureExt, TryStreamExt}; use futures::{FutureExt, Stream, TryFutureExt, TryStreamExt, stream::FuturesUnordered};
use tracing::{debug, instrument}; use tracing::{debug, instrument};
use url::Url; use url::Url;
@ -70,7 +70,7 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> {
) -> impl Stream<Item = Result<CachedDist, Error>> + 'stream { ) -> impl Stream<Item = Result<CachedDist, Error>> + 'stream {
distributions distributions
.into_iter() .into_iter()
.map(|dist| async move { .map(async |dist| {
let wheel = self let wheel = self
.get_wheel((*dist).clone(), in_flight, resolution) .get_wheel((*dist).clone(), in_flight, resolution)
.boxed_local() .boxed_local()
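
The `.map(|dist| async move { ... })` to `.map(async |dist| { ... })` change relies on async closures, which are stable as of Rust 1.85 and are one of the concrete payoffs of the MSRV bump. A minimal sketch of the two spellings; the names and values here are illustrative:

```rust
// Before Rust 1.85 this had to be a plain closure returning an async
// block: let make_future = |x: u32| async move { x + 1 };
async fn demo() -> u32 {
    // Async closure syntax, stable since 1.85.
    let make_future = async |x: u32| x + 1;
    make_future(41).await
}

fn main() {
    // Constructing the future is enough for this sketch; driving it
    // would need an async runtime, which is out of scope here.
    let _ = demo();
}
```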

View file

@ -25,8 +25,7 @@ impl RequirementSatisfaction {
pub(crate) fn check(distribution: &InstalledDist, source: &RequirementSource) -> Self { pub(crate) fn check(distribution: &InstalledDist, source: &RequirementSource) -> Self {
trace!( trace!(
"Comparing installed with source: {:?} {:?}", "Comparing installed with source: {:?} {:?}",
distribution, distribution, source
source
); );
// Filter out already-installed packages. // Filter out already-installed packages.
match source { match source {
@ -194,8 +193,7 @@ impl RequirementSatisfaction {
{ {
trace!( trace!(
"Path mismatch: {:?} vs. {:?}", "Path mismatch: {:?} vs. {:?}",
requested_path, requested_path, installed_path,
installed_path,
); );
return Self::Mismatch; return Self::Mismatch;
} }
@ -264,8 +262,7 @@ impl RequirementSatisfaction {
{ {
trace!( trace!(
"Path mismatch: {:?} vs. {:?}", "Path mismatch: {:?} vs. {:?}",
requested_path, requested_path, installed_path,
installed_path,
); );
return Self::Mismatch; return Self::Mismatch;
} }

View file

@ -1,7 +1,7 @@
[package] [package]
name = "uv-macros" name = "uv-macros"
version = "0.0.1" version = "0.0.1"
edition = "2021" edition = { workspace = true }
[lib] [lib]
proc-macro = true proc-macro = true

View file

@ -2,7 +2,7 @@ mod options_metadata;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;
use syn::{parse_macro_input, Attribute, DeriveInput, ImplItem, ItemImpl, LitStr}; use syn::{Attribute, DeriveInput, ImplItem, ItemImpl, LitStr, parse_macro_input};
#[proc_macro_derive(OptionsMetadata, attributes(option, doc, option_group))] #[proc_macro_derive(OptionsMetadata, attributes(option, doc, option_group))]
pub fn derive_options_metadata(input: TokenStream) -> TokenStream { pub fn derive_options_metadata(input: TokenStream) -> TokenStream {
@ -38,7 +38,7 @@ fn impl_combine(ast: &DeriveInput) -> TokenStream {
} }
}); });
let gen = quote! { let stream = quote! {
impl crate::Combine for #name { impl crate::Combine for #name {
fn combine(self, other: #name) -> #name { fn combine(self, other: #name) -> #name {
#name { #name {
@ -47,7 +47,7 @@ fn impl_combine(ast: &DeriveInput) -> TokenStream {
} }
} }
}; };
gen.into() stream.into()
} }
fn get_doc_comment(attrs: &[Attribute]) -> String { fn get_doc_comment(attrs: &[Attribute]) -> String {
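
The `let gen = quote! { ... }` binding is renamed to `stream` because `gen` becomes a reserved keyword in Edition 2024 (reserved for future generator blocks). A minimal sketch of the two ways such code can keep compiling after the edition bump; the values are placeholders:

```rust
fn main() {
    // Renaming, as the hunk above does, is the usual fix...
    let stream = 1;
    // ...but a raw identifier also keeps the old name valid on
    // Edition 2024.
    let r#gen = 2;
    println!("{}", stream + r#gen);
}
```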

View file

@ -301,15 +301,24 @@ fn parse_field_attributes(attribute: &Attribute) -> syn::Result<FieldAttributes>
})?; })?;
let Some(default) = default else { let Some(default) = default else {
return Err(syn::Error::new(attribute.span(), "Mandatory `default` field is missing in `#[option]` attribute. Specify the default using `#[option(default=\"..\")]`.")); return Err(syn::Error::new(
attribute.span(),
"Mandatory `default` field is missing in `#[option]` attribute. Specify the default using `#[option(default=\"..\")]`.",
));
}; };
let Some(value_type) = value_type else { let Some(value_type) = value_type else {
return Err(syn::Error::new(attribute.span(), "Mandatory `value_type` field is missing in `#[option]` attribute. Specify the value type using `#[option(value_type=\"..\")]`.")); return Err(syn::Error::new(
attribute.span(),
"Mandatory `value_type` field is missing in `#[option]` attribute. Specify the value type using `#[option(value_type=\"..\")]`.",
));
}; };
let Some(example) = example else { let Some(example) = example else {
return Err(syn::Error::new(attribute.span(), "Mandatory `example` field is missing in `#[option]` attribute. Add an example using `#[option(example=\"..\")]`.")); return Err(syn::Error::new(
attribute.span(),
"Mandatory `example` field is missing in `#[option]` attribute. Add an example using `#[option(example=\"..\")]`.",
));
}; };
Ok(FieldAttributes { Ok(FieldAttributes {

View file

@ -1,7 +1,7 @@
[package] [package]
name = "uv-normalize" name = "uv-normalize"
version = "0.0.1" version = "0.0.1"
edition = "2021" edition = { workspace = true }
description = "Normalization for distribution, package and extra names." description = "Normalization for distribution, package and extra names."
[lib] [lib]

View file

@ -7,7 +7,7 @@ use serde::{Deserialize, Deserializer, Serialize};
use uv_small_str::SmallString; use uv_small_str::SmallString;
use crate::{validate_and_normalize_ref, InvalidNameError}; use crate::{InvalidNameError, validate_and_normalize_ref};
/// Either the literal "all" or a list of extras /// Either the literal "all" or a list of extras
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]

View file

@ -9,7 +9,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
use uv_small_str::SmallString; use uv_small_str::SmallString;
use crate::{ use crate::{
validate_and_normalize_ref, InvalidNameError, InvalidPipGroupError, InvalidPipGroupPathError, InvalidNameError, InvalidPipGroupError, InvalidPipGroupPathError, validate_and_normalize_ref,
}; };
/// The normalized name of a dependency group. /// The normalized name of a dependency group.

View file

@ -3,7 +3,7 @@ use std::fmt::{Display, Formatter};
pub use dist_info_name::DistInfoName; pub use dist_info_name::DistInfoName;
pub use extra_name::{DefaultExtras, ExtraName}; pub use extra_name::{DefaultExtras, ExtraName};
pub use group_name::{DefaultGroups, GroupName, PipGroupName, DEV_DEPENDENCIES}; pub use group_name::{DEV_DEPENDENCIES, DefaultGroups, GroupName, PipGroupName};
pub use package_name::PackageName; pub use package_name::PackageName;
use uv_small_str::SmallString; use uv_small_str::SmallString;

View file

@ -6,7 +6,7 @@ use serde::{Deserialize, Deserializer, Serialize};
use uv_small_str::SmallString; use uv_small_str::SmallString;
use crate::{validate_and_normalize_ref, InvalidNameError}; use crate::{InvalidNameError, validate_and_normalize_ref};
/// The normalized name of a package. /// The normalized name of a package.
/// ///
@ -51,13 +51,11 @@ impl PackageName {
owned_string.push('_'); owned_string.push('_');
// Iterate over the rest of the string. // Iterate over the rest of the string.
owned_string.extend(self.0[dash_position + 1..].chars().map(|character| { owned_string.extend(
if character == '-' { self.0[dash_position + 1..]
'_' .chars()
} else { .map(|character| if character == '-' { '_' } else { character }),
character );
}
}));
Cow::Owned(owned_string) Cow::Owned(owned_string)
} else { } else {

View file

@ -25,13 +25,13 @@
#[cfg(feature = "version-ranges")] #[cfg(feature = "version-ranges")]
pub use version_ranges::{ pub use version_ranges::{
release_specifier_to_range, release_specifiers_to_ranges, LowerBound, UpperBound, LowerBound, UpperBound, release_specifier_to_range, release_specifiers_to_ranges,
}; };
pub use { pub use {
version::{ version::{
LocalSegment, LocalVersion, LocalVersionSlice, Operator, OperatorParseError, Prerelease, LocalSegment, LocalVersion, LocalVersionSlice, MIN_VERSION, Operator, OperatorParseError,
PrereleaseKind, Version, VersionParseError, VersionPattern, VersionPatternParseError, Prerelease, PrereleaseKind, Version, VersionParseError, VersionPattern,
MIN_VERSION, VersionPatternParseError,
}, },
version_specifier::{ version_specifier::{
VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,

View file

@ -1,4 +1,4 @@
use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use std::fmt::Formatter; use std::fmt::Formatter;
use std::num::NonZero; use std::num::NonZero;
use std::ops::Deref; use std::ops::Deref;

View file

@ -5,9 +5,9 @@ use std::ops::Bound;
use std::str::FromStr; use std::str::FromStr;
use crate::{ use crate::{
version, Operator, OperatorParseError, Version, VersionPattern, VersionPatternParseError, Operator, OperatorParseError, Version, VersionPattern, VersionPatternParseError, version,
}; };
use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
use tracing::warn; use tracing::warn;
@ -828,15 +828,21 @@ mod tests {
fn test_equal() { fn test_equal() {
let version = Version::from_str("1.1.post1").unwrap(); let version = Version::from_str("1.1.post1").unwrap();
assert!(!VersionSpecifier::from_str("== 1.1") assert!(
.unwrap() !VersionSpecifier::from_str("== 1.1")
.contains(&version)); .unwrap()
assert!(VersionSpecifier::from_str("== 1.1.post1") .contains(&version)
.unwrap() );
.contains(&version)); assert!(
assert!(VersionSpecifier::from_str("== 1.1.*") VersionSpecifier::from_str("== 1.1.post1")
.unwrap() .unwrap()
.contains(&version)); .contains(&version)
);
assert!(
VersionSpecifier::from_str("== 1.1.*")
.unwrap()
.contains(&version)
);
} }
const VERSIONS_ALL: &[&str] = &[ const VERSIONS_ALL: &[&str] = &[
@ -1087,12 +1093,16 @@ mod tests {
#[test] #[test]
fn test_arbitrary_equality() { fn test_arbitrary_equality() {
assert!(VersionSpecifier::from_str("=== 1.2a1") assert!(
.unwrap() VersionSpecifier::from_str("=== 1.2a1")
.contains(&Version::from_str("1.2a1").unwrap())); .unwrap()
assert!(!VersionSpecifier::from_str("=== 1.2a1") .contains(&Version::from_str("1.2a1").unwrap())
.unwrap() );
.contains(&Version::from_str("1.2a1+local").unwrap())); assert!(
!VersionSpecifier::from_str("=== 1.2a1")
.unwrap()
.contains(&Version::from_str("1.2a1+local").unwrap())
);
} }
#[test] #[test]

View file

@ -21,7 +21,7 @@ use std::fmt::{Debug, Display, Formatter};
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use thiserror::Error; use thiserror::Error;
use url::Url; use url::Url;
@ -42,8 +42,8 @@ pub use uv_normalize::{ExtraName, InvalidNameError, PackageName};
pub use uv_pep440; pub use uv_pep440;
use uv_pep440::{VersionSpecifier, VersionSpecifiers}; use uv_pep440::{VersionSpecifier, VersionSpecifiers};
pub use verbatim_url::{ pub use verbatim_url::{
expand_env_vars, looks_like_git_repository, split_scheme, strip_host, Scheme, VerbatimUrl, Scheme, VerbatimUrl, VerbatimUrlError, expand_env_vars, looks_like_git_repository,
VerbatimUrlError, split_scheme, strip_host,
}; };
mod cursor; mod cursor;
@ -980,7 +980,7 @@ mod tests {
use uv_pep440::{Operator, Version, VersionPattern, VersionSpecifier}; use uv_pep440::{Operator, Version, VersionPattern, VersionSpecifier};
use crate::cursor::Cursor; use crate::cursor::Cursor;
use crate::marker::{parse, MarkerExpression, MarkerTree, MarkerValueVersion}; use crate::marker::{MarkerExpression, MarkerTree, MarkerValueVersion, parse};
use crate::{ use crate::{
MarkerOperator, MarkerValueString, Requirement, TracingReporter, VerbatimUrl, VersionOrUrl, MarkerOperator, MarkerValueString, Requirement, TracingReporter, VerbatimUrl, VersionOrUrl,
}; };

View file

@ -55,12 +55,12 @@ use itertools::{Either, Itertools};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use version_ranges::Ranges; use version_ranges::Ranges;
use uv_pep440::{release_specifier_to_range, Operator, Version, VersionSpecifier}; use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range};
use crate::marker::MarkerValueExtra;
use crate::marker::lowering::{ use crate::marker::lowering::{
CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion, CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
}; };
use crate::marker::MarkerValueExtra;
use crate::{ use crate::{
ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion, ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion,
}; };
@ -153,11 +153,7 @@ impl InternerGuard<'_> {
.entry(node.clone()) .entry(node.clone())
.or_insert_with(|| NodeId::new(self.shared.nodes.push(node), false)); .or_insert_with(|| NodeId::new(self.shared.nodes.push(node), false));
if flipped { if flipped { id.not() } else { *id }
id.not()
} else {
*id
}
} }
/// Returns a decision node for a single marker expression. /// Returns a decision node for a single marker expression.
@ -1708,7 +1704,7 @@ impl fmt::Debug for NodeId {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{NodeId, INTERNER}; use super::{INTERNER, NodeId};
use crate::MarkerExpression; use crate::MarkerExpression;
fn expr(s: &str) -> NodeId { fn expr(s: &str) -> NodeId {

View file

@ -6,13 +6,13 @@ use std::str::FromStr;
use arcstr::ArcStr; use arcstr::ArcStr;
use itertools::Itertools; use itertools::Itertools;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use version_ranges::Ranges; use version_ranges::Ranges;
use uv_normalize::ExtraName; use uv_normalize::ExtraName;
use uv_pep440::{Version, VersionParseError, VersionSpecifier}; use uv_pep440::{Version, VersionParseError, VersionSpecifier};
use super::algebra::{Edges, NodeId, Variable, INTERNER}; use super::algebra::{Edges, INTERNER, NodeId, Variable};
use super::simplify; use super::simplify;
use crate::cursor::Cursor; use crate::cursor::Cursor;
use crate::marker::lowering::{ use crate::marker::lowering::{
@ -2967,9 +2967,11 @@ mod test {
#[test] #[test]
fn test_is_false() { fn test_is_false() {
assert!(m("python_version < '3.10' and python_version >= '3.10'").is_false()); assert!(m("python_version < '3.10' and python_version >= '3.10'").is_false());
assert!(m("(python_version < '3.10' and python_version >= '3.10') \ assert!(
m("(python_version < '3.10' and python_version >= '3.10') \
or (python_version < '3.9' and python_version >= '3.9')") or (python_version < '3.9' and python_version >= '3.9')")
.is_false()); .is_false()
);
assert!(!m("python_version < '3.10'").is_false()); assert!(!m("python_version < '3.10'").is_false());
assert!(!m("python_version < '0'").is_false()); assert!(!m("python_version < '0'").is_false());
@ -3226,11 +3228,13 @@ mod test {
m("os_name == 'Linux'"), m("os_name == 'Linux'"),
); );
assert!(m(" assert!(
m("
(os_name == 'Linux' and extra == 'foo') (os_name == 'Linux' and extra == 'foo')
or (os_name != 'Linux' and extra == 'bar')") or (os_name != 'Linux' and extra == 'bar')")
.without_extras() .without_extras()
.is_true()); .is_true()
);
assert_eq!( assert_eq!(
m("os_name == 'Linux' and extra != 'foo'").without_extras(), m("os_name == 'Linux' and extra != 'foo'").without_extras(),
@ -3259,11 +3263,13 @@ mod test {
m("os_name == 'Linux' and extra == 'foo'").only_extras(), m("os_name == 'Linux' and extra == 'foo'").only_extras(),
m("extra == 'foo'"), m("extra == 'foo'"),
); );
assert!(m(" assert!(
m("
(os_name == 'foo' and extra == 'foo') (os_name == 'foo' and extra == 'foo')
or (os_name == 'bar' and extra != 'foo')") or (os_name == 'bar' and extra != 'foo')")
.only_extras() .only_extras()
.is_true()); .is_true()
);
assert_eq!( assert_eq!(
m(" m("
(os_name == 'Linux' and extra == 'foo') (os_name == 'Linux' and extra == 'foo')

View file

@ -8,9 +8,9 @@ use uv_normalize::ExtraName;
use crate::marker::parse; use crate::marker::parse;
use crate::{ use crate::{
expand_env_vars, parse_extras_cursor, split_extras, split_scheme, strip_host, Cursor, Cursor, MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, RequirementOrigin, Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError, expand_env_vars,
RequirementOrigin, Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError, parse_extras_cursor, split_extras, split_scheme, strip_host,
}; };
/// An extension over [`Pep508Url`] that also supports parsing unnamed requirements, namely paths. /// An extension over [`Pep508Url`] that also supports parsing unnamed requirements, namely paths.
@ -19,7 +19,7 @@ use crate::{
pub trait UnnamedRequirementUrl: Pep508Url { pub trait UnnamedRequirementUrl: Pep508Url {
/// Parse a URL from a relative or absolute path. /// Parse a URL from a relative or absolute path.
fn parse_path(path: impl AsRef<Path>, working_dir: impl AsRef<Path>) fn parse_path(path: impl AsRef<Path>, working_dir: impl AsRef<Path>)
-> Result<Self, Self::Err>; -> Result<Self, Self::Err>;
/// Parse a URL from an absolute path. /// Parse a URL from an absolute path.
fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err>; fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err>;

View file

@ -2,7 +2,7 @@
name = "uv-performance-memory-allocator" name = "uv-performance-memory-allocator"
version = "0.1.0" version = "0.1.0"
publish = false publish = false
edition = "2021" edition = "2024"
[lib] [lib]
doctest = false doctest = false

View file

@ -236,8 +236,8 @@ pub enum ParseLanguageTagError {
mod tests { mod tests {
use std::str::FromStr; use std::str::FromStr;
use crate::language_tag::ParseLanguageTagError;
use crate::LanguageTag; use crate::LanguageTag;
use crate::language_tag::ParseLanguageTagError;
#[test] #[test]
fn none() { fn none() {

View file

@ -7,7 +7,7 @@ use std::{env, fmt, io};
use fs_err::tokio::File; use fs_err::tokio::File;
use futures::TryStreamExt; use futures::TryStreamExt;
use glob::{glob, GlobError, PatternError}; use glob::{GlobError, PatternError, glob};
use itertools::Itertools; use itertools::Itertools;
use reqwest::header::AUTHORIZATION; use reqwest::header::AUTHORIZATION;
use reqwest::multipart::Part; use reqwest::multipart::Part;
@ -21,15 +21,15 @@ use thiserror::Error;
use tokio::io::{AsyncReadExt, BufReader}; use tokio::io::{AsyncReadExt, BufReader};
use tokio::sync::Semaphore; use tokio::sync::Semaphore;
use tokio_util::io::ReaderStream; use tokio_util::io::ReaderStream;
use tracing::{debug, enabled, trace, warn, Level}; use tracing::{Level, debug, enabled, trace, warn};
use trusted_publishing::TrustedPublishingToken; use trusted_publishing::TrustedPublishingToken;
use url::Url; use url::Url;
use uv_auth::Credentials; use uv_auth::Credentials;
use uv_cache::{Cache, Refresh}; use uv_cache::{Cache, Refresh};
use uv_client::{ use uv_client::{
BaseClient, MetadataFormat, OwnedArchive, RegistryClientBuilder, UvRetryableStrategy, BaseClient, DEFAULT_RETRIES, MetadataFormat, OwnedArchive, RegistryClientBuilder,
DEFAULT_RETRIES, UvRetryableStrategy,
}; };
use uv_configuration::{KeyringProviderType, TrustedPublishing}; use uv_configuration::{KeyringProviderType, TrustedPublishing};
use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename}; use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename};
@ -243,6 +243,7 @@ impl PublishSendError {
/// <https://github.com/astral-sh/uv/issues/8030> caused by /// <https://github.com/astral-sh/uv/issues/8030> caused by
/// <https://github.com/pypa/setuptools/issues/3777> in combination with /// <https://github.com/pypa/setuptools/issues/3777> in combination with
/// <https://github.com/pypi/warehouse/blob/50a58f3081e693a3772c0283050a275e350004bf/warehouse/forklift/legacy.py#L1133-L1155> /// <https://github.com/pypi/warehouse/blob/50a58f3081e693a3772c0283050a275e350004bf/warehouse/forklift/legacy.py#L1133-L1155>
#[allow(clippy::result_large_err)]
pub fn files_for_publishing( pub fn files_for_publishing(
paths: Vec<String>, paths: Vec<String>,
) -> Result<Vec<(PathBuf, String, DistFilename)>, PublishError> { ) -> Result<Vec<(PathBuf, String, DistFilename)>, PublishError> {
@ -585,7 +586,7 @@ async fn source_dist_pkg_info(file: &Path) -> Result<Vec<u8>, PublishPrepareErro
let mut pkg_infos: Vec<(PathBuf, Vec<u8>)> = archive let mut pkg_infos: Vec<(PathBuf, Vec<u8>)> = archive
.entries()? .entries()?
.map_err(PublishPrepareError::from) .map_err(PublishPrepareError::from)
.try_filter_map(|mut entry| async move { .try_filter_map(async |mut entry| {
let path = entry let path = entry
.path() .path()
.map_err(PublishPrepareError::from)? .map_err(PublishPrepareError::from)?
@ -883,7 +884,7 @@ async fn handle_response(registry: &Url, response: Response) -> Result<(), Publi
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{build_request, form_metadata, Reporter}; use crate::{Reporter, build_request, form_metadata};
use insta::{assert_debug_snapshot, assert_snapshot}; use insta::{assert_debug_snapshot, assert_snapshot};
use itertools::Itertools; use itertools::Itertools;
use std::path::PathBuf; use std::path::PathBuf;
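
The new `#[allow(clippy::result_large_err)]` attribute silences Clippy's `result_large_err` lint, which warns when a function's `Err` variant is large (128 bytes by default) and suggests boxing it. A sketch of the kind of signature that trips the lint and where the allow goes; the types here are hypothetical, not uv's:

```rust
// A deliberately large error type, well past the lint's default
// threshold of 128 bytes.
pub struct BigError(pub [u8; 256]);

// Without the allow, Clippy suggests boxing the error, e.g.
// Result<(), Box<BigError>>.
#[allow(clippy::result_large_err)]
pub fn might_fail(fail: bool) -> Result<(), BigError> {
    if fail { Err(BigError([0; 256])) } else { Ok(()) }
}

fn main() {
    assert!(might_fail(false).is_ok());
    if let Err(err) = might_fail(true) {
        assert_eq!(err.0.len(), 256);
    }
}
```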

View file

@ -1,8 +1,8 @@
//! Trusted publishing (via OIDC) with GitHub actions. //! Trusted publishing (via OIDC) with GitHub actions.
use base64::prelude::BASE64_URL_SAFE_NO_PAD;
use base64::Engine; use base64::Engine;
use reqwest::{header, StatusCode}; use base64::prelude::BASE64_URL_SAFE_NO_PAD;
use reqwest::{StatusCode, header};
use reqwest_middleware::ClientWithMiddleware; use reqwest_middleware::ClientWithMiddleware;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::env; use std::env;

View file

@ -1,5 +1,5 @@
use regex::Regex; use regex::Regex;
use serde::{de, Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize, de};
use std::borrow::Cow; use std::borrow::Cow;
use std::str::FromStr; use std::str::FromStr;
use std::sync::LazyLock; use std::sync::LazyLock;

View file

@ -2,8 +2,8 @@ use serde::Deserialize;
use uv_normalize::PackageName; use uv_normalize::PackageName;
use crate::metadata::Headers;
use crate::MetadataError; use crate::MetadataError;
use crate::metadata::Headers;
/// A subset of the full core metadata specification, including only the /// A subset of the full core metadata specification, including only the
/// fields that have been consistent across all versions of the specification. /// fields that have been consistent across all versions of the specification.

View file

@ -5,8 +5,8 @@ use std::fmt::Write;
use std::str; use std::str;
use std::str::FromStr; use std::str::FromStr;
use crate::metadata::Headers;
use crate::MetadataError; use crate::MetadataError;
use crate::metadata::Headers;
/// Code Metadata 2.3 as specified in /// Code Metadata 2.3 as specified in
/// <https://packaging.python.org/specifications/core-metadata/>. /// <https://packaging.python.org/specifications/core-metadata/>.

View file

@ -11,9 +11,9 @@ use uv_pep440::{Version, VersionSpecifiers};
use uv_pep508::Requirement; use uv_pep508::Requirement;
use crate::lenient_requirement::LenientRequirement; use crate::lenient_requirement::LenientRequirement;
use crate::metadata::pyproject_toml::PyProjectToml;
use crate::metadata::Headers; use crate::metadata::Headers;
use crate::{metadata, LenientVersionSpecifiers, MetadataError, VerbatimParsedUrl}; use crate::metadata::pyproject_toml::PyProjectToml;
use crate::{LenientVersionSpecifiers, MetadataError, VerbatimParsedUrl, metadata};
/// A subset of the full core metadata specification, including only the /// A subset of the full core metadata specification, including only the
/// fields that are relevant to dependency resolution. /// fields that are relevant to dependency resolution.

View file

@ -18,9 +18,9 @@ use uv_pep508::Pep508Error;
use crate::VerbatimParsedUrl; use crate::VerbatimParsedUrl;
pub use build_requires::BuildRequires; pub use build_requires::BuildRequires;
pub use metadata_resolver::ResolutionMetadata;
pub use metadata10::Metadata10; pub use metadata10::Metadata10;
pub use metadata23::Metadata23; pub use metadata23::Metadata23;
pub use metadata_resolver::ResolutionMetadata;
pub use pyproject_toml::PyProjectToml; pub use pyproject_toml::PyProjectToml;
pub use requires_dist::RequiresDist; pub use requires_dist::RequiresDist;
pub use requires_txt::RequiresTxt; pub use requires_txt::RequiresTxt;

View file

@ -1,8 +1,8 @@
use std::str::FromStr; use std::str::FromStr;
use indexmap::IndexMap; use indexmap::IndexMap;
use serde::de::IntoDeserializer;
use serde::Deserialize; use serde::Deserialize;
use serde::de::IntoDeserializer;
use uv_normalize::{ExtraName, PackageName}; use uv_normalize::{ExtraName, PackageName};
use uv_pep440::Version; use uv_pep440::Version;

View file

@ -7,7 +7,7 @@ use url::{ParseError, Url};
use uv_distribution_filename::{DistExtension, ExtensionError}; use uv_distribution_filename::{DistExtension, ExtensionError};
use uv_git_types::{GitUrl, GitUrlParseError}; use uv_git_types::{GitUrl, GitUrlParseError};
use uv_pep508::{ use uv_pep508::{
looks_like_git_repository, Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError, Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError, looks_like_git_repository,
}; };
use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind}; use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind};

View file

@ -11,11 +11,11 @@ use tracing::{debug, instrument, trace};
use which::{which, which_all}; use which::{which, which_all};
use uv_cache::Cache; use uv_cache::Cache;
use uv_fs::which::is_executable;
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_fs::which::is_executable;
use uv_pep440::{ use uv_pep440::{
release_specifiers_to_ranges, LowerBound, Prerelease, UpperBound, Version, VersionSpecifier, LowerBound, Prerelease, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
VersionSpecifiers, release_specifiers_to_ranges,
}; };
use uv_static::EnvVars; use uv_static::EnvVars;
use uv_warnings::warn_user_once; use uv_warnings::warn_user_once;
@ -30,11 +30,11 @@ use crate::managed::ManagedPythonInstallations;
use crate::microsoft_store::find_microsoft_store_pythons; use crate::microsoft_store::find_microsoft_store_pythons;
use crate::virtualenv::Error as VirtualEnvError; use crate::virtualenv::Error as VirtualEnvError;
use crate::virtualenv::{ use crate::virtualenv::{
conda_environment_from_env, virtualenv_from_env, virtualenv_from_working_dir, CondaEnvironmentKind, conda_environment_from_env, virtualenv_from_env,
virtualenv_python_executable, CondaEnvironmentKind, virtualenv_from_working_dir, virtualenv_python_executable,
}; };
#[cfg(windows)] #[cfg(windows)]
use crate::windows_registry::{registry_pythons, WindowsPython}; use crate::windows_registry::{WindowsPython, registry_pythons};
use crate::{BrokenSymlink, Interpreter, PythonVersion}; use crate::{BrokenSymlink, Interpreter, PythonVersion};
/// A request to find a Python installation. /// A request to find a Python installation.
@ -251,8 +251,8 @@ pub enum Error {
/// - Discovered virtual environment (e.g. `.venv` in a parent directory) /// - Discovered virtual environment (e.g. `.venv` in a parent directory)
/// ///
/// Notably, "system" environments are excluded. See [`python_executables_from_installed`]. /// Notably, "system" environments are excluded. See [`python_executables_from_installed`].
fn python_executables_from_virtual_environments<'a>( fn python_executables_from_virtual_environments<'a>()
) -> impl Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a { -> impl Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a {
let from_active_environment = iter::once_with(|| { let from_active_environment = iter::once_with(|| {
virtualenv_from_env() virtualenv_from_env()
.into_iter() .into_iter()
@ -1252,8 +1252,8 @@ pub(crate) fn is_windows_store_shim(path: &Path) -> bool {
CreateFileW, FILE_ATTRIBUTE_REPARSE_POINT, FILE_FLAG_BACKUP_SEMANTICS, CreateFileW, FILE_ATTRIBUTE_REPARSE_POINT, FILE_FLAG_BACKUP_SEMANTICS,
FILE_FLAG_OPEN_REPARSE_POINT, MAXIMUM_REPARSE_DATA_BUFFER_SIZE, OPEN_EXISTING, FILE_FLAG_OPEN_REPARSE_POINT, MAXIMUM_REPARSE_DATA_BUFFER_SIZE, OPEN_EXISTING,
}; };
use windows_sys::Win32::System::Ioctl::FSCTL_GET_REPARSE_POINT;
use windows_sys::Win32::System::IO::DeviceIoControl; use windows_sys::Win32::System::IO::DeviceIoControl;
use windows_sys::Win32::System::Ioctl::FSCTL_GET_REPARSE_POINT;
// The path must be absolute. // The path must be absolute.
if !path.is_absolute() { if !path.is_absolute() {
@ -2715,7 +2715,7 @@ fn split_wheel_tag_release_version(version: Version) -> Version {
mod tests { mod tests {
use std::{path::PathBuf, str::FromStr}; use std::{path::PathBuf, str::FromStr};
use assert_fs::{prelude::*, TempDir}; use assert_fs::{TempDir, prelude::*};
use target_lexicon::{Aarch64Architecture, Architecture}; use target_lexicon::{Aarch64Architecture, Architecture};
use test_log::test; use test_log::test;
use uv_pep440::{Prerelease, PrereleaseKind, VersionSpecifiers}; use uv_pep440::{Prerelease, PrereleaseKind, VersionSpecifiers};

View file

@ -21,13 +21,14 @@ use tokio_util::either::Either;
use tracing::{debug, instrument}; use tracing::{debug, instrument};
use url::Url; use url::Url;
use uv_client::{is_extended_transient_error, BaseClient, WrappedReqwestError}; use uv_client::{BaseClient, WrappedReqwestError, is_extended_transient_error};
use uv_distribution_filename::{ExtensionError, SourceDistExtension}; use uv_distribution_filename::{ExtensionError, SourceDistExtension};
use uv_extract::hash::Hasher; use uv_extract::hash::Hasher;
use uv_fs::{rename_with_retry, Simplified}; use uv_fs::{Simplified, rename_with_retry};
use uv_pypi_types::{HashAlgorithm, HashDigest}; use uv_pypi_types::{HashAlgorithm, HashDigest};
use uv_static::EnvVars; use uv_static::EnvVars;
use crate::PythonVariant;
use crate::implementation::{ use crate::implementation::{
Error as ImplementationError, ImplementationName, LenientImplementationName, Error as ImplementationError, ImplementationName, LenientImplementationName,
}; };
@ -35,7 +36,6 @@ use crate::installation::PythonInstallationKey;
use crate::libc::LibcDetectionError; use crate::libc::LibcDetectionError;
use crate::managed::ManagedPythonInstallation; use crate::managed::ManagedPythonInstallation;
use crate::platform::{self, Arch, Libc, Os}; use crate::platform::{self, Arch, Libc, Os};
use crate::PythonVariant;
use crate::{Interpreter, PythonRequest, PythonVersion, VersionRequest}; use crate::{Interpreter, PythonRequest, PythonVersion, VersionRequest};
#[derive(Error, Debug)] #[derive(Error, Debug)]
@ -88,9 +88,7 @@ pub enum Error {
InvalidRequestPlatform(#[from] platform::Error), InvalidRequestPlatform(#[from] platform::Error),
#[error("No download found for request: {}", _0.green())] #[error("No download found for request: {}", _0.green())]
NoDownloadFound(PythonDownloadRequest), NoDownloadFound(PythonDownloadRequest),
#[error( #[error("A mirror was provided via `{0}`, but the URL does not match the expected format: {0}")]
"A mirror was provided via `{0}`, but the URL does not match the expected format: {0}"
)]
Mirror(&'static str, &'static str), Mirror(&'static str, &'static str),
#[error(transparent)] #[error(transparent)]
LibcDetection(#[from] LibcDetectionError), LibcDetection(#[from] LibcDetectionError),
@ -1183,7 +1181,7 @@ async fn read_url(
let size = response.content_length(); let size = response.content_length();
let stream = response let stream = response
.bytes_stream() .bytes_stream()
.map_err(|err| io::Error::new(io::ErrorKind::Other, err)) .map_err(io::Error::other)
.into_async_read(); .into_async_read();
Ok((Either::Right(stream.compat()), size)) Ok((Either::Right(stream.compat()), size))
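
The download path makes the same `io::Error::other` switch, but point-free: since it is an ordinary function, it can be handed straight to `map_err` instead of being wrapped in a closure. A small sketch with a hypothetical parsing function:

```rust
use std::io;

fn parse_port(raw: &str) -> Result<u16, io::Error> {
    // Equivalent to .map_err(|err| io::Error::other(err)); the closure
    // adds nothing, since io::Error::other already has the right shape.
    raw.parse::<u16>().map_err(io::Error::other)
}

fn main() {
    assert!(parse_port("8080").is_ok());
    assert!(parse_port("not-a-port").is_err());
}
```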

View file

@ -14,7 +14,7 @@ use uv_pep440::Version;
use crate::discovery::find_python_installation; use crate::discovery::find_python_installation;
use crate::installation::PythonInstallation; use crate::installation::PythonInstallation;
use crate::virtualenv::{virtualenv_python_executable, PyVenvConfiguration}; use crate::virtualenv::{PyVenvConfiguration, virtualenv_python_executable};
use crate::{ use crate::{
EnvironmentPreference, Error, Interpreter, Prefix, PythonNotFound, PythonPreference, EnvironmentPreference, Error, Interpreter, Prefix, PythonNotFound, PythonPreference,
PythonRequest, Target, PythonRequest, Target,

Some files were not shown because too many files have changed in this diff.