Mirror of https://github.com/astral-sh/uv.git (synced 2025-07-07 13:25:00 +00:00)

Bump MSRV to 1.85 and Edition 2024 (#13516)

## Summary

Builds on https://github.com/astral-sh/uv/pull/11724. Closes https://github.com/astral-sh/uv/issues/13476.

Parent: cc6e766232
Commit: c5032aee80

240 changed files with 726 additions and 737 deletions
@@ -11,8 +11,8 @@ exclude = [
 resolver = "2"
 
 [workspace.package]
-edition = "2021"
-rust-version = "1.84"
+edition = "2024"
+rust-version = "1.85"
 homepage = "https://pypi.org/project/uv/"
 documentation = "https://pypi.org/project/uv/"
 repository = "https://github.com/astral-sh/uv"

@@ -1,7 +1,7 @@
 [package]
 name = "uv-auth"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 
 [lib]
 doctest = false

@@ -10,8 +10,8 @@ use url::Url;
 
 use uv_once_map::OnceMap;
 
-use crate::credentials::{Credentials, Username};
 use crate::Realm;
+use crate::credentials::{Credentials, Username};
 
 type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;
 
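The import reshuffling above (and in most hunks that follow) comes from rustfmt's 2024 style edition, which sorts imports with "version sorting": plain ASCII order, so capitalized item names now come before lowercase module paths, and embedded numbers compare numerically (e.g. `NonZeroI8` before `NonZeroI128`). A minimal sketch of the effect, using stand-in module names rather than uv's real layout:

```rust
// Stand-in modules, not uv's actual crate layout.
mod auth {
    pub struct Realm;
    pub mod credentials {
        pub struct Credentials;
        pub struct Username;
    }
}

// 2021 style edition ordering:
//     use crate::auth::credentials::{Credentials, Username};
//     use crate::auth::Realm;
// 2024 style edition ordering ('R' sorts before 'c' in ASCII):
use crate::auth::Realm;
use crate::auth::credentials::{Credentials, Username};

fn main() {
    let (_realm, _creds, _user) = (Realm, Credentials, Username);
}
```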
@@ -5,8 +5,8 @@ use std::borrow::Cow;
 use std::fmt;
 
 use netrc::Netrc;
-use reqwest::header::HeaderValue;
 use reqwest::Request;
+use reqwest::header::HeaderValue;
 use std::io::Read;
 use std::io::Write;
 use url::Url;

@@ -4,11 +4,11 @@ use http::{Extensions, StatusCode};
 use url::Url;
 
 use crate::{
+    CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
     cache::FetchUrl,
     credentials::{Credentials, Username},
     index::{AuthPolicy, Indexes},
     realm::Realm,
-    CredentialsCache, KeyringProvider, CREDENTIALS_CACHE,
 };
 use anyhow::{anyhow, format_err};
 use netrc::Netrc;

@@ -591,8 +591,8 @@ mod tests {
     use wiremock::matchers::{basic_auth, method, path_regex};
     use wiremock::{Mock, MockServer, ResponseTemplate};
 
-    use crate::credentials::Password;
     use crate::Index;
+    use crate::credentials::Password;
 
     use super::*;
 

@@ -1097,7 +1097,10 @@ mod tests {
         let mut url = base_url.clone();
         url.set_username("other_user").unwrap();
         assert!(
-            matches!(client.get(url).send().await, Err(reqwest_middleware::Error::Middleware(_))),
+            matches!(
+                client.get(url).send().await,
+                Err(reqwest_middleware::Error::Middleware(_))
+            ),
             "If the username does not match, a password should not be fetched, and the middleware should fail eagerly since `authenticate = always` is not satisfied"
         );
 

@@ -1614,8 +1617,8 @@ mod tests {
     /// credentials for _every_ request URL at the cost of inconsistent behavior when
     /// credentials are not scoped to a realm.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_mixed_authentication_in_realm_same_username(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_mixed_authentication_in_realm_same_username()
+    -> Result<(), Error> {
         let username = "user";
         let password_1 = "password1";
         let password_2 = "password2";

@@ -1714,8 +1717,8 @@ mod tests {
     /// where multiple URLs with the same username and realm share the same realm-level
     /// credentials cache entry.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_mixed_authentication_different_indexes_same_realm(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_mixed_authentication_different_indexes_same_realm()
+    -> Result<(), Error> {
         let username = "user";
         let password_1 = "password1";
         let password_2 = "password2";

@@ -1826,8 +1829,8 @@ mod tests {
     /// Demonstrates that when an index' credentials are cached for its realm, we
     /// find those credentials if they're not present in the keyring.
     #[test(tokio::test)]
-    async fn test_credentials_from_keyring_shared_authentication_different_indexes_same_realm(
-    ) -> Result<(), Error> {
+    async fn test_credentials_from_keyring_shared_authentication_different_indexes_same_realm()
+    -> Result<(), Error> {
         let username = "user";
         let password = "password";
 
@@ -1,7 +1,7 @@
 use std::str::FromStr;
 
 use uv_bench::criterion::{
-    criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
+    BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
 };
 use uv_distribution_filename::WheelFilename;
 use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags};

@@ -1,7 +1,7 @@
 use std::str::FromStr;
 
 use uv_bench::criterion::black_box;
-use uv_bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion};
+use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
 use uv_cache::Cache;
 use uv_client::RegistryClientBuilder;
 use uv_distribution_types::Requirement;

@@ -4,7 +4,7 @@ mod settings;
 mod source_dist;
 mod wheel;
 
-pub use metadata::{check_direct_build, PyProjectToml};
+pub use metadata::{PyProjectToml, check_direct_build};
 pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

@@ -1,10 +1,10 @@
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::wheel::build_exclude_matcher;
 use crate::{
-    find_roots, BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
 };
-use flate2::write::GzEncoder;
 use flate2::Compression;
+use flate2::write::GzEncoder;
 use fs_err::File;
 use globset::{Glob, GlobSet};
 use std::io;

@@ -17,8 +17,8 @@ use uv_warnings::warn_user_once;
 
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::{
-    find_module_root, find_roots, BuildBackendSettings, DirectoryWriter, Error, FileList,
-    ListWriter, PyProjectToml,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
+    find_module_root, find_roots,
 };
 
 /// Build a wheel from the source tree and place it in the output directory.

@@ -19,13 +19,13 @@ use fs_err as fs;
 use indoc::formatdoc;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
-use serde::de::{value, IntoDeserializer, SeqAccess, Visitor};
-use serde::{de, Deserialize, Deserializer};
+use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
+use serde::{Deserialize, Deserializer, de};
 use tempfile::TempDir;
 use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{debug, info_span, instrument, Instrument};
+use tracing::{Instrument, debug, info_span, instrument};
 
 use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution::BuildRequires;

@@ -1,4 +1,4 @@
-use anyhow::{bail, Context, Result};
+use anyhow::{Context, Result, bail};
 use std::env;
 use std::io::Write;
 use std::path::PathBuf;

@@ -2,8 +2,8 @@ use std::borrow::Cow;
 use std::collections::{BTreeMap, BTreeSet};
 use std::hash::{Hash, Hasher};
 use std::num::{
-    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroU128, NonZeroU16,
-    NonZeroU32, NonZeroU64, NonZeroU8,
+    NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI128, NonZeroU8, NonZeroU16, NonZeroU32,
+    NonZeroU64, NonZeroU128,
 };
 use std::path::{Path, PathBuf};
 
@@ -11,7 +11,7 @@ use tracing::debug;
 
 pub use archive::ArchiveId;
 use uv_cache_info::Timestamp;
-use uv_fs::{cachedir, directories, LockedFile};
+use uv_fs::{LockedFile, cachedir, directories};
 use uv_normalize::PackageName;
 use uv_pypi_types::ResolutionMetadata;
 

@@ -19,7 +19,7 @@ pub use crate::by_timestamp::CachedByTimestamp;
 #[cfg(feature = "clap")]
 pub use crate::cli::CacheArgs;
 use crate::removal::Remover;
-pub use crate::removal::{rm_rf, Removal};
+pub use crate::removal::{Removal, rm_rf};
 pub use crate::wheel::WheelCache;
 use crate::wheel::WheelCacheKind;
 

@@ -1194,11 +1194,7 @@ impl Refresh {
     pub fn combine(self, other: Refresh) -> Self {
         /// Return the maximum of two timestamps.
         fn max(a: Timestamp, b: Timestamp) -> Timestamp {
-            if a > b {
-                a
-            } else {
-                b
-            }
+            if a > b { a } else { b }
         }
 
         match (self, other) {
@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 
 use url::Url;
 
-use uv_cache_key::{cache_digest, CanonicalUrl};
+use uv_cache_key::{CanonicalUrl, cache_digest};
 use uv_distribution_types::IndexUrl;
 
 /// Cache wheels and their metadata, both from remote wheels and built from source distributions.

@@ -1,4 +1,4 @@
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
 use clap::{Args, ValueEnum};
 
 use uv_warnings::warn_user;

@@ -3,9 +3,9 @@ use std::ops::{Deref, DerefMut};
 use std::path::PathBuf;
 use std::str::FromStr;
 
-use anyhow::{anyhow, Result};
-use clap::builder::styling::{AnsiColor, Effects, Style};
+use anyhow::{Result, anyhow};
 use clap::builder::Styles;
+use clap::builder::styling::{AnsiColor, Effects, Style};
 use clap::{Args, Parser, Subcommand};
 
 use url::Url;

@@ -3,7 +3,7 @@
 use std::fmt;
 
 use serde::Serialize;
-use uv_pep508::{uv_pep440::Version, PackageName};
+use uv_pep508::{PackageName, uv_pep440::Version};
 
 /// Information about the git repository where uv was built from.
 #[derive(Serialize)]

@@ -1,7 +1,7 @@
 [package]
 name = "uv-client"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 
 [lib]
 doctest = false

@@ -25,10 +25,10 @@ use uv_static::EnvVars;
 use uv_version::version;
 use uv_warnings::warn_user_once;
 
+use crate::Connectivity;
 use crate::linehaul::LineHaul;
 use crate::middleware::OfflineMiddleware;
 use crate::tls::read_identity;
-use crate::Connectivity;
 
 pub const DEFAULT_RETRIES: u32 = 3;
 

@@ -8,17 +8,17 @@ use reqwest_retry::RetryPolicy;
 use rkyv::util::AlignedVec;
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
-use tracing::{debug, info_span, instrument, trace, warn, Instrument};
+use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 
 use uv_cache::{CacheEntry, Freshness};
 use uv_fs::write_atomic;
 
-use crate::base_client::is_extended_transient_error;
 use crate::BaseClient;
+use crate::base_client::is_extended_transient_error;
 use crate::{
+    Error, ErrorKind,
     httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
     rkyvutil::OwnedArchive,
-    Error, ErrorKind,
 };
 
 /// A trait the generalizes (de)serialization at a high level.

@@ -230,7 +230,7 @@ impl CachedClient {
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
         let payload = self
-            .get_cacheable(req, cache_entry, cache_control, |resp| async {
+            .get_cacheable(req, cache_entry, cache_control, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
             })

@@ -359,7 +359,7 @@ impl CachedClient {
         let (response, cache_policy) = self.fresh_request(req).await?;
 
         let payload = self
-            .run_response_callback(cache_entry, cache_policy, response, move |resp| async {
+            .run_response_callback(cache_entry, cache_policy, response, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
             })

@@ -585,7 +585,7 @@ impl CachedClient {
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
         let payload = self
-            .get_cacheable_with_retry(req, cache_entry, cache_control, |resp| async {
+            .get_cacheable_with_retry(req, cache_entry, cache_control, async |resp| {
                 let payload = response_callback(resp).await?;
                 Ok(SerdeCacheable { inner: payload })
            })
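The `|resp| async { ... }` → `async |resp| { ... }` rewrites above use async closures, stabilized in Rust 1.85 alongside Edition 2024. A minimal sketch of the difference (the names and the `futures` executor here are illustrative, not uv's API): unlike a closure returning an `async move` block, an async closure's future may borrow from the closure's captures, so captured state no longer has to be cloned into the block.

```rust
async fn demo() {
    let greeting = String::from("hello");

    // Pre-1.85 spelling: the returned future cannot borrow from the closure,
    // so captured state is cloned and moved into the `async move` block.
    let old_style = |n: u32| {
        let greeting = greeting.clone();
        async move { format!("{greeting} #{n}") }
    };

    // Rust 1.85 / Edition 2024 spelling: the async closure's future may borrow
    // `greeting` directly from the closure, no clone required.
    let new_style = async |n: u32| format!("{greeting} #{n}");

    assert_eq!(old_style(1).await, "hello #1");
    assert_eq!(new_style(2).await, "hello #2");
}

fn main() {
    // Any executor works; `futures` is used here purely for the sketch.
    futures::executor::block_on(demo());
}
```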
@@ -10,7 +10,7 @@ use uv_normalize::PackageName;
 use uv_redacted::redacted_url;
 
 use crate::middleware::OfflineError;
-use crate::{html, FlatIndexError};
+use crate::{FlatIndexError, html};
 
 #[derive(Debug, thiserror::Error)]
 #[error(transparent)]

@@ -46,7 +46,7 @@ impl Error {
 
     /// Returns `true` if this error corresponds to an I/O "not found" error.
     pub(crate) fn is_file_not_exists(&self) -> bool {
-        let ErrorKind::Io(ref err) = &*self.kind else {
+        let ErrorKind::Io(err) = &*self.kind else {
            return false;
        };
        matches!(err.kind(), std::io::ErrorKind::NotFound)
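The `ref` dropped from `let ErrorKind::Io(ref err) = &*self.kind else` above appears to reflect Edition 2024's tightened match ergonomics: when the scrutinee is a reference, the binding already defaults to by-reference, and the now-redundant `ref` modifier is rejected. A hedged sketch with a made-up enum (not uv's `ErrorKind`):

```rust
enum Kind {
    Io(std::io::Error),
    Other(String),
}

fn is_not_found(kind: &Kind) -> bool {
    // Edition 2021 accepted `Kind::Io(ref err)` here; Edition 2024 rejects the
    // redundant `ref` because matching through `&Kind` already binds `err` by
    // reference. Dropping it leaves `err: &std::io::Error`, same as before.
    let Kind::Io(err) = kind else {
        return false;
    };
    matches!(err.kind(), std::io::ErrorKind::NotFound)
}

fn main() {
    let missing = Kind::Io(std::io::Error::from(std::io::ErrorKind::NotFound));
    let other = Kind::Other("not an I/O error".to_string());
    assert!(is_not_found(&missing));
    assert!(!is_not_found(&other));
}
```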
@@ -246,7 +246,9 @@ pub enum ErrorKind {
     #[error("Writing to cache archive failed: {0}")]
     ArchiveWrite(String),
 
-    #[error("Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`")]
+    #[error(
+        "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`"
+    )]
     Offline(String),
 }
 

@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 
 use futures::{FutureExt, StreamExt};
 use reqwest::Response;
-use tracing::{debug, info_span, warn, Instrument};
+use tracing::{Instrument, debug, info_span, warn};
 use url::Url;
 
 use uv_cache::{Cache, CacheBucket};

@@ -113,7 +113,7 @@ impl<'a> FlatIndexClient<'a> {
         indexes: impl Iterator<Item = &IndexUrl>,
     ) -> Result<FlatIndexEntries, FlatIndexError> {
         let mut fetches = futures::stream::iter(indexes)
-            .map(|index| async move {
+            .map(async |index| {
                 let entries = self.fetch_index(index).await?;
                 if entries.is_empty() {
                     warn!("No packages found in `--find-links` entry: {}", index);
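The same async-closure conversion shows up in the stream pipelines here and below (`.map(async |index| { ... })` fed into `.buffered(...)`). A small self-contained sketch of that shape, with invented URLs and a stand-in fetch function rather than uv's client:

```rust
use futures::{StreamExt, stream};

// Stand-in for a network call; only the shape of the pipeline matters here.
async fn fetch_len(url: &str) -> usize {
    url.len()
}

async fn run() -> Vec<usize> {
    stream::iter(["https://example.org/simple", "https://example.net/simple"])
        // Async closure instead of the older `|url| async move { ... }`.
        .map(async |url| fetch_len(url).await)
        // Drive up to two of the produced futures concurrently.
        .buffered(2)
        .collect()
        .await
}

fn main() {
    println!("{:?}", futures::executor::block_on(run()));
}
```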
@@ -1,6 +1,6 @@
 pub use base_client::{
-    is_extended_transient_error, AuthIntegration, BaseClient, BaseClientBuilder, ExtraMiddleware,
-    UvRetryableStrategy, DEFAULT_RETRIES,
+    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
+    UvRetryableStrategy, is_extended_transient_error,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};

@@ -20,7 +20,11 @@ impl OfflineError {
 
 impl std::fmt::Display for OfflineError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{}`", self.url)
+        write!(
+            f,
+            "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{}`",
+            self.url
+        )
     }
 }
 

@@ -13,7 +13,7 @@ use reqwest::{Proxy, Response};
 use reqwest_middleware::ClientWithMiddleware;
 use rustc_hash::FxHashMap;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{debug, info_span, instrument, trace, warn, Instrument};
+use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 use url::Url;
 
 use uv_auth::Indexes;

@@ -352,7 +352,9 @@ impl RegistryClient {
                 // The search failed because of an HTTP status code that we don't ignore for
                 // this index. We end our search here.
                 SimpleMetadataSearchOutcome::StatusCodeFailure(status_code) => {
-                    debug!("Indexes search failed because of status code failure: {status_code}");
+                    debug!(
+                        "Indexes search failed because of status code failure: {status_code}"
+                    );
                     break;
                 }
             }

@@ -371,7 +373,7 @@ impl RegistryClient {
             // Otherwise, fetch concurrently.
             IndexStrategy::UnsafeBestMatch | IndexStrategy::UnsafeFirstMatch => {
                 results = futures::stream::iter(indexes)
-                    .map(|index| async move {
+                    .map(async |index| {
                         let _permit = download_concurrency.acquire().await;
                         match index.format {
                             IndexFormat::Simple => {

@@ -400,12 +402,10 @@ impl RegistryClient {
                         }
                     })
                     .buffered(8)
-                    .filter_map(|result: Result<_, Error>| async move {
-                        match result {
-                            Ok((index, Some(metadata))) => Some(Ok((index, metadata))),
-                            Ok((_, None)) => None,
-                            Err(err) => Some(Err(err)),
-                        }
+                    .filter_map(async |result: Result<_, Error>| match result {
+                        Ok((index, Some(metadata))) => Some(Ok((index, metadata))),
+                        Ok((_, None)) => None,
+                        Err(err) => Some(Err(err)),
                     })
                     .try_collect::<Vec<_>>()
                     .await?;

@@ -800,7 +800,7 @@ impl RegistryClient {
             lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };
 
-        let response_callback = |response: Response| async {
+        let response_callback = async |response: Response| {
             let bytes = response
                 .bytes()
                 .await
@@ -987,11 +987,12 @@ impl RegistryClient {
                 std::io::Error::new(
                     std::io::ErrorKind::TimedOut,
                     format!(
-                        "Failed to download distribution due to network timeout. Try increasing UV_HTTP_TIMEOUT (current value: {}s).", self.timeout().as_secs()
+                        "Failed to download distribution due to network timeout. Try increasing UV_HTTP_TIMEOUT (current value: {}s).",
+                        self.timeout().as_secs()
                     ),
                 )
             } else {
-                std::io::Error::new(std::io::ErrorKind::Other, err)
+                std::io::Error::other(err)
             }
         }
     }
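The `std::io::Error::new(std::io::ErrorKind::Other, err)` → `std::io::Error::other(err)` substitutions here and in later hunks use a shorthand constructor that has been stable since Rust 1.74; the two forms build the same error. A small sketch:

```rust
use std::io;

fn main() {
    // Long form used before this commit.
    let old = io::Error::new(io::ErrorKind::Other, "download failed");
    // Shorthand the commit switches to; same kind, same message.
    let new = io::Error::other("download failed");
    assert_eq!(old.kind(), new.kind());
    assert_eq!(old.to_string(), new.to_string());
}
```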
@@ -1224,7 +1225,7 @@ mod tests {
     use uv_normalize::PackageName;
     use uv_pypi_types::{JoinRelativeError, SimpleJson};
 
-    use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};
+    use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};
 
     #[test]
     fn ignore_failing_files() {
@ -12,12 +12,12 @@ serializing and deserializing.
|
|||
*/
|
||||
|
||||
use rkyv::{
|
||||
Archive, Deserialize, Portable, Serialize,
|
||||
api::high::{HighDeserializer, HighSerializer, HighValidator},
|
||||
bytecheck::CheckBytes,
|
||||
rancor,
|
||||
ser::allocator::ArenaHandle,
|
||||
util::AlignedVec,
|
||||
Archive, Deserialize, Portable, Serialize,
|
||||
};
|
||||
|
||||
use crate::{Error, ErrorKind};
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use std::{
|
||||
collections::{btree_map::Entry, BTreeMap},
|
||||
collections::{BTreeMap, btree_map::Entry},
|
||||
str::FromStr,
|
||||
};
|
||||
use uv_cache_key::CacheKeyHasher;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{borrow::Cow, sync::Arc};
|
||||
|
||||
use uv_normalize::{DefaultGroups, GroupName, DEV_DEPENDENCIES};
|
||||
use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, GroupName};
|
||||
|
||||
/// Manager of all dependency-group decisions and settings history.
|
||||
///
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! Configure rayon and determine thread stack sizes.
|
||||
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use uv_static::EnvVars;
|
||||
|
||||
/// The default minimum stack size for uv threads.
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
[package]
|
||||
name = "uv-console"
|
||||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
edition = { workspace = true }
|
||||
description = "Utilities for interacting with the terminal"
|
||||
|
||||
[lib]
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use console::{measure_text_width, style, Key, Term};
|
||||
use console::{Key, Term, measure_text_width, style};
|
||||
use std::{cmp::Ordering, iter};
|
||||
|
||||
/// Prompt the user for confirmation in the given [`Term`].
|
||||
|
|
|
@ -3,13 +3,13 @@ use std::cmp::max;
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anstream::println;
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
use clap::{Command, CommandFactory};
|
||||
use itertools::Itertools;
|
||||
use pretty_assertions::StrComparison;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use uv_cli::Cli;
|
||||
|
||||
|
@ -353,7 +353,7 @@ mod tests {
|
|||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
use super::{Args, main};
|
||||
|
||||
#[test]
|
||||
fn test_generate_cli_reference() -> Result<()> {
|
||||
|
|
|
@ -7,8 +7,8 @@ use std::path::PathBuf;
|
|||
|
||||
use uv_static::EnvVars;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
|
@ -113,7 +113,7 @@ mod tests {
|
|||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
use super::{Args, main};
|
||||
|
||||
#[test]
|
||||
fn test_generate_env_vars_reference() -> Result<()> {
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anstream::println;
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
use pretty_assertions::StrComparison;
|
||||
use schemars::{schema_for, JsonSchema};
|
||||
use schemars::{JsonSchema, schema_for};
|
||||
use serde::Deserialize;
|
||||
|
||||
use uv_settings::Options as SettingsOptions;
|
||||
use uv_workspace::pyproject::ToolUv as WorkspaceOptions;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
#[derive(Deserialize, JsonSchema)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
|
@ -118,7 +118,7 @@ mod tests {
|
|||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
use super::{Args, main};
|
||||
|
||||
#[test]
|
||||
fn test_generate_json_schema() -> Result<()> {
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::fmt::Write;
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anstream::println;
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
use itertools::Itertools;
|
||||
use pretty_assertions::StrComparison;
|
||||
use schemars::JsonSchema;
|
||||
|
@ -16,8 +16,8 @@ use uv_options_metadata::{OptionField, OptionSet, OptionsMetadata, Visit};
|
|||
use uv_settings::Options as SettingsOptions;
|
||||
use uv_workspace::pyproject::ToolUv as WorkspaceOptions;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
#[derive(Deserialize, JsonSchema, OptionsMetadata)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
|
@ -398,7 +398,7 @@ mod tests {
|
|||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
use super::{Args, main};
|
||||
|
||||
#[test]
|
||||
fn test_generate_options_reference() -> Result<()> {
|
||||
|
|
|
@ -7,8 +7,8 @@ use std::time::Instant;
|
|||
use anstream::eprintln;
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
use tracing_durations_export::plot::PlotConfig;
|
||||
use tracing_durations_export::DurationsLayerBuilder;
|
||||
use tracing_durations_export::plot::PlotConfig;
|
||||
use tracing_subscriber::filter::Directive;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
|
|
@ -2,10 +2,10 @@
|
|||
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{Result, anyhow};
|
||||
use clap::Parser;
|
||||
use poloto::build;
|
||||
use resvg::usvg_text_layout::{fontdb, TreeTextToPath};
|
||||
use resvg::usvg_text_layout::{TreeTextToPath, fontdb};
|
||||
use serde::Deserialize;
|
||||
use tagu::prelude::*;
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use anstream::println;
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
use clap::Parser;
|
||||
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
|
|
|
@ -86,11 +86,7 @@ pub fn legacy_user_state_dir() -> Option<PathBuf> {
|
|||
/// Return a [`PathBuf`] if the given [`OsString`] is an absolute path.
|
||||
fn parse_path(path: OsString) -> Option<PathBuf> {
|
||||
let path = PathBuf::from(path);
|
||||
if path.is_absolute() {
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if path.is_absolute() { Some(path) } else { None }
|
||||
}
|
||||
|
||||
/// Returns the path to the user configuration directory.
|
||||
|
|
|
@ -226,11 +226,13 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn name_too_long() {
|
||||
assert!(SourceDistFilename::parse(
|
||||
"foo.zip",
|
||||
SourceDistExtension::Zip,
|
||||
&PackageName::from_str("foo-lib").unwrap()
|
||||
)
|
||||
.is_err());
|
||||
assert!(
|
||||
SourceDistFilename::parse(
|
||||
"foo.zip",
|
||||
SourceDistExtension::Zip,
|
||||
&PackageName::from_str("foo-lib").unwrap()
|
||||
)
|
||||
.is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::hash::Hash;
|
|||
use std::str::FromStr;
|
||||
|
||||
use memchr::memchr;
|
||||
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ use crate::{
|
|||
|
||||
/// A built distribution (wheel) that exists in the local cache.
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
pub enum CachedDist {
|
||||
/// The distribution exists in a registry, like `PyPI`.
|
||||
Registry(CachedRegistryDist),
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
use std::collections::VecDeque;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
|
||||
use petgraph::prelude::EdgeRef;
|
||||
use petgraph::Direction;
|
||||
use petgraph::prelude::EdgeRef;
|
||||
use rustc_hash::FxHashSet;
|
||||
use version_ranges::Ranges;
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
|
|||
use thiserror::Error;
|
||||
use url::{ParseError, Url};
|
||||
|
||||
use uv_pep508::{split_scheme, Scheme, VerbatimUrl, VerbatimUrlError};
|
||||
use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme};
|
||||
|
||||
use crate::{Index, IndexStatusCodeStrategy, Verbatim};
|
||||
|
||||
|
|
|
@ -7,12 +7,12 @@ use thiserror::Error;
|
|||
use url::Url;
|
||||
|
||||
use uv_distribution_filename::DistExtension;
|
||||
use uv_fs::{relative_to, PortablePath, PortablePathBuf, CWD};
|
||||
use uv_fs::{CWD, PortablePath, PortablePathBuf, relative_to};
|
||||
use uv_git_types::{GitOid, GitReference, GitUrl, GitUrlParseError, OidParseError};
|
||||
use uv_normalize::{ExtraName, GroupName, PackageName};
|
||||
use uv_pep440::VersionSpecifiers;
|
||||
use uv_pep508::{
|
||||
marker, MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl,
|
||||
MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl, marker,
|
||||
};
|
||||
|
||||
use crate::{IndexMetadata, IndexUrl};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use uv_cache::{ArchiveId, Cache, ARCHIVE_VERSION};
|
||||
use uv_cache::{ARCHIVE_VERSION, ArchiveId, Cache};
|
||||
use uv_distribution_filename::WheelFilename;
|
||||
use uv_distribution_types::Hashed;
|
||||
use uv_pypi_types::{HashDigest, HashDigests};
|
||||
|
|
|
@ -10,7 +10,7 @@ use tempfile::TempDir;
|
|||
use tokio::io::{AsyncRead, AsyncSeekExt, ReadBuf};
|
||||
use tokio::sync::Semaphore;
|
||||
use tokio_util::compat::FuturesAsyncReadCompatExt;
|
||||
use tracing::{info_span, instrument, warn, Instrument};
|
||||
use tracing::{Instrument, info_span, instrument, warn};
|
||||
use url::Url;
|
||||
|
||||
use uv_cache::{ArchiveId, CacheBucket, CacheEntry, WheelCache};
|
||||
|
@ -97,7 +97,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
),
|
||||
)
|
||||
} else {
|
||||
io::Error::new(io::ErrorKind::Other, err)
|
||||
io::Error::other(err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -647,7 +647,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
archive
|
||||
} else {
|
||||
self.client
|
||||
.managed(|client| async {
|
||||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
|
@ -814,7 +814,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
archive
|
||||
} else {
|
||||
self.client
|
||||
.managed(|client| async {
|
||||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
|
|
|
@ -8,9 +8,9 @@ use uv_distribution_types::{
|
|||
use uv_platform_tags::Tags;
|
||||
use uv_types::HashStrategy;
|
||||
|
||||
use crate::index::cached_wheel::CachedWheel;
|
||||
use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
|
||||
use crate::Error;
|
||||
use crate::index::cached_wheel::CachedWheel;
|
||||
use crate::source::{HTTP_REVISION, HttpRevisionPointer, LOCAL_REVISION, LocalRevisionPointer};
|
||||
|
||||
/// A local index of built distributions for a specific source distribution.
|
||||
#[derive(Debug)]
|
||||
|
|
|
@ -12,7 +12,7 @@ use uv_platform_tags::Tags;
|
|||
use uv_types::HashStrategy;
|
||||
|
||||
use crate::index::cached_wheel::CachedWheel;
|
||||
use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
|
||||
use crate::source::{HTTP_REVISION, HttpRevisionPointer, LOCAL_REVISION, LocalRevisionPointer};
|
||||
|
||||
/// An entry in the [`RegistryWheelIndex`].
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
|
@ -64,7 +64,7 @@ impl<'a> RegistryWheelIndex<'a> {
|
|||
|
||||
/// Get an entry in the index.
|
||||
fn get_impl(&mut self, name: &'a PackageName) -> &[IndexEntry] {
|
||||
let versions = match self.index.entry(name) {
|
||||
(match self.index.entry(name) {
|
||||
Entry::Occupied(entry) => entry.into_mut(),
|
||||
Entry::Vacant(entry) => entry.insert(Self::index(
|
||||
name,
|
||||
|
@ -74,8 +74,7 @@ impl<'a> RegistryWheelIndex<'a> {
|
|||
self.hasher,
|
||||
self.build_configuration,
|
||||
)),
|
||||
};
|
||||
versions
|
||||
}) as _
|
||||
}
|
||||
|
||||
/// Add a package to the index by reading from the cache.
|
||||
|
|
|
@ -13,10 +13,10 @@ use uv_distribution_types::{
|
|||
use uv_git_types::{GitReference, GitUrl, GitUrlParseError};
|
||||
use uv_normalize::{ExtraName, GroupName, PackageName};
|
||||
use uv_pep440::VersionSpecifiers;
|
||||
use uv_pep508::{looks_like_git_repository, MarkerTree, VerbatimUrl, VersionOrUrl};
|
||||
use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository};
|
||||
use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl};
|
||||
use uv_workspace::pyproject::{PyProjectToml, Source, Sources};
|
||||
use uv_workspace::Workspace;
|
||||
use uv_workspace::pyproject::{PyProjectToml, Source, Sources};
|
||||
|
||||
use crate::metadata::GitWorkspaceMember;
|
||||
|
||||
|
@ -285,8 +285,7 @@ impl LoweredRequirement {
|
|||
// relative to main workspace: `../current_workspace/packages/current_project`
|
||||
let url = VerbatimUrl::from_absolute_path(member.root())?;
|
||||
let install_path = url.to_file_path().map_err(|()| {
|
||||
LoweringError::RelativeTo(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
LoweringError::RelativeTo(io::Error::other(
|
||||
"Invalid path in file URL",
|
||||
))
|
||||
})?;
|
||||
|
@ -689,12 +688,9 @@ fn path_source(
|
|||
RequirementOrigin::Workspace => workspace_root,
|
||||
};
|
||||
let url = VerbatimUrl::from_path(path, base)?.with_given(path.to_string_lossy());
|
||||
let install_path = url.to_file_path().map_err(|()| {
|
||||
LoweringError::RelativeTo(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Invalid path in file URL",
|
||||
))
|
||||
})?;
|
||||
let install_path = url
|
||||
.to_file_path()
|
||||
.map_err(|()| LoweringError::RelativeTo(io::Error::other("Invalid path in file URL")))?;
|
||||
|
||||
let is_dir = if let Ok(metadata) = install_path.metadata() {
|
||||
metadata.is_dir()
|
||||
|
|
|
@ -6,14 +6,14 @@ use rustc_hash::FxHashSet;
|
|||
|
||||
use uv_configuration::SourceStrategy;
|
||||
use uv_distribution_types::{IndexLocations, Requirement};
|
||||
use uv_normalize::{ExtraName, GroupName, PackageName, DEV_DEPENDENCIES};
|
||||
use uv_normalize::{DEV_DEPENDENCIES, ExtraName, GroupName, PackageName};
|
||||
use uv_pep508::MarkerTree;
|
||||
use uv_workspace::dependency_groups::FlatDependencyGroups;
|
||||
use uv_workspace::pyproject::{Sources, ToolUvSources};
|
||||
use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
|
||||
|
||||
use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
|
||||
use crate::Metadata;
|
||||
use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RequiresDist {
|
||||
|
@ -466,8 +466,8 @@ mod test {
|
|||
use uv_workspace::pyproject::PyProjectToml;
|
||||
use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};
|
||||
|
||||
use crate::metadata::requires_dist::FlatRequiresDist;
|
||||
use crate::RequiresDist;
|
||||
use crate::metadata::requires_dist::FlatRequiresDist;
|
||||
|
||||
async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
|
||||
let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;
|
||||
|
|
|
@ -18,7 +18,7 @@ use fs_err::tokio as fs;
|
|||
use futures::{FutureExt, TryStreamExt};
|
||||
use reqwest::{Response, StatusCode};
|
||||
use tokio_util::compat::FuturesAsyncReadCompatExt;
|
||||
use tracing::{debug, info_span, instrument, warn, Instrument};
|
||||
use tracing::{Instrument, debug, info_span, instrument, warn};
|
||||
use url::Url;
|
||||
use zip::ZipArchive;
|
||||
|
||||
|
@ -39,7 +39,7 @@ use uv_fs::{rename_with_retry, write_atomic};
|
|||
use uv_git_types::{GitHubRepository, GitOid};
|
||||
use uv_metadata::read_archive_metadata;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pep440::{release_specifiers_to_ranges, Version};
|
||||
use uv_pep440::{Version, release_specifiers_to_ranges};
|
||||
use uv_platform_tags::Tags;
|
||||
use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata};
|
||||
use uv_types::{BuildContext, BuildStack, SourceBuildTrait};
|
||||
|
@ -736,7 +736,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
Ok(revision)
|
||||
} else {
|
||||
client
|
||||
.managed(|client| async move {
|
||||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(
|
||||
|
@ -1925,7 +1925,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
debug!("Attempting to fetch `pyproject.toml` from: {url}");
|
||||
|
||||
let content = client
|
||||
.managed(|client| async {
|
||||
.managed(async |client| {
|
||||
let response = client
|
||||
.uncached_client(git.repository())
|
||||
.get(&url)
|
||||
|
@ -2073,7 +2073,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
.instrument(info_span!("download", source_dist = %source))
|
||||
};
|
||||
client
|
||||
.managed(|client| async move {
|
||||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(
|
||||
|
@ -2107,7 +2107,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
.map_err(Error::CacheWrite)?;
|
||||
let reader = response
|
||||
.bytes_stream()
|
||||
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
|
||||
.map_err(std::io::Error::other)
|
||||
.into_async_read();
|
||||
|
||||
// Create a hasher for each hash algorithm.
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{LazyLock, Mutex};
|
||||
|
||||
use crate::vendor::{CloneableSeekableReader, HasLength};
|
||||
use crate::Error;
|
||||
use crate::vendor::{CloneableSeekableReader, HasLength};
|
||||
use rayon::prelude::*;
|
||||
use rustc_hash::FxHashSet;
|
||||
use tracing::warn;
|
||||
|
|
|
@ -255,7 +255,7 @@ pub async fn rename_with_retry(
|
|||
let from = from.as_ref();
|
||||
let to = to.as_ref();
|
||||
|
||||
let rename = || async { fs_err::rename(from, to) };
|
||||
let rename = async || fs_err::rename(from, to);
|
||||
|
||||
rename
|
||||
.retry(backoff_file_move())
|
||||
|
@ -312,16 +312,13 @@ pub fn with_retry_sync(
|
|||
})
|
||||
.call()
|
||||
.map_err(|err| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Failed {} {} to {}: {}",
|
||||
operation_name,
|
||||
from.display(),
|
||||
to.display(),
|
||||
err
|
||||
),
|
||||
)
|
||||
std::io::Error::other(format!(
|
||||
"Failed {} {} to {}: {}",
|
||||
operation_name,
|
||||
from.display(),
|
||||
to.display(),
|
||||
err
|
||||
))
|
||||
})
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
|
@ -417,21 +414,15 @@ pub async fn persist_with_retry(
|
|||
|
||||
match persisted {
|
||||
Ok(_) => Ok(()),
|
||||
Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Failed to persist temporary file to {}: {}",
|
||||
to.display(),
|
||||
error_message,
|
||||
),
|
||||
)),
|
||||
Err(PersistRetryError::LostState) => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Failed to retrieve temporary file while trying to persist to {}",
|
||||
to.display()
|
||||
),
|
||||
)),
|
||||
Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::other(format!(
|
||||
"Failed to persist temporary file to {}: {}",
|
||||
to.display(),
|
||||
error_message,
|
||||
))),
|
||||
Err(PersistRetryError::LostState) => Err(std::io::Error::other(format!(
|
||||
"Failed to retrieve temporary file while trying to persist to {}",
|
||||
to.display()
|
||||
))),
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
|
@ -491,21 +482,15 @@ pub fn persist_with_retry_sync(
|
|||
|
||||
match persisted {
|
||||
Ok(_) => Ok(()),
|
||||
Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Failed to persist temporary file to {}: {}",
|
||||
to.display(),
|
||||
error_message,
|
||||
),
|
||||
)),
|
||||
Err(PersistRetryError::LostState) => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Failed to retrieve temporary file while trying to persist to {}",
|
||||
to.display()
|
||||
),
|
||||
)),
|
||||
Err(PersistRetryError::Persist(error_message)) => Err(std::io::Error::other(format!(
|
||||
"Failed to persist temporary file to {}: {}",
|
||||
to.display(),
|
||||
error_message,
|
||||
))),
|
||||
Err(PersistRetryError::LostState) => Err(std::io::Error::other(format!(
|
||||
"Failed to retrieve temporary file while trying to persist to {}",
|
||||
to.display()
|
||||
))),
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
|
@ -617,14 +602,11 @@ impl LockedFile {
|
|||
);
|
||||
file.file().lock_exclusive().map_err(|err| {
|
||||
// Not an fs_err method, we need to build our own path context
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Could not acquire lock for `{resource}` at `{}`: {}",
|
||||
file.path().user_display(),
|
||||
err
|
||||
),
|
||||
)
|
||||
std::io::Error::other(format!(
|
||||
"Could not acquire lock for `{resource}` at `{}`: {}",
|
||||
file.path().user_display(),
|
||||
err
|
||||
))
|
||||
})?;
|
||||
|
||||
debug!("Acquired lock for `{resource}`");
|
||||
|
|
|
@ -319,14 +319,11 @@ pub fn relative_to(
|
|||
.map(|stripped| (stripped, ancestor))
|
||||
})
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!(
|
||||
"Trivial strip failed: {} vs. {}",
|
||||
path.simplified_display(),
|
||||
base.simplified_display()
|
||||
),
|
||||
)
|
||||
std::io::Error::other(format!(
|
||||
"Trivial strip failed: {} vs. {}",
|
||||
path.simplified_display(),
|
||||
base.simplified_display()
|
||||
))
|
||||
})?;
|
||||
|
||||
// go as many levels up as required
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::str::{self};
|
|||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use cargo_util::{paths, ProcessBuilder};
|
||||
use cargo_util::{ProcessBuilder, paths};
|
||||
use reqwest::StatusCode;
|
||||
use reqwest_middleware::ClientWithMiddleware;
|
||||
use tracing::{debug, warn};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
pub use crate::credentials::{store_credentials_from_url, GIT_STORE};
|
||||
pub use crate::credentials::{GIT_STORE, store_credentials_from_url};
|
||||
pub use crate::git::GIT;
|
||||
pub use crate::resolver::{
|
||||
GitResolver, GitResolverError, RepositoryReference, ResolvedRepositoryReference,
|
||||
|
|
|
@ -3,13 +3,13 @@ use std::path::PathBuf;
|
|||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use dashmap::mapref::one::Ref;
|
||||
use dashmap::DashMap;
|
||||
use dashmap::mapref::one::Ref;
|
||||
use fs_err::tokio as fs;
|
||||
use reqwest_middleware::ClientWithMiddleware;
|
||||
use tracing::debug;
|
||||
|
||||
use uv_cache_key::{cache_digest, RepositoryUrl};
|
||||
use uv_cache_key::{RepositoryUrl, cache_digest};
|
||||
use uv_fs::LockedFile;
|
||||
use uv_git_types::{GitHubRepository, GitOid, GitReference, GitUrl};
|
||||
use uv_version::version;
|
||||
|
|
|
@ -11,12 +11,12 @@ use reqwest_middleware::ClientWithMiddleware;
|
|||
use tracing::{debug, instrument};
|
||||
use url::Url;
|
||||
|
||||
use uv_cache_key::{cache_digest, RepositoryUrl};
|
||||
use uv_cache_key::{RepositoryUrl, cache_digest};
|
||||
use uv_git_types::GitUrl;
|
||||
use uv_redacted::redacted_url;
|
||||
|
||||
use crate::git::GitRemote;
|
||||
use crate::GIT_STORE;
|
||||
use crate::git::GitRemote;
|
||||
|
||||
/// A remote Git source that can be checked out locally.
|
||||
pub struct GitSource {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use globset::{Glob, GlobSet, GlobSetBuilder};
|
||||
use regex_automata::dfa;
|
||||
use regex_automata::dfa::Automaton;
|
||||
use std::path::{Path, MAIN_SEPARATOR, MAIN_SEPARATOR_STR};
|
||||
use std::path::{MAIN_SEPARATOR, MAIN_SEPARATOR_STR, Path};
|
||||
use tracing::warn;
|
||||
|
||||
/// Chosen at a whim -Konsti
|
||||
|
@ -32,14 +32,13 @@ impl GlobDirFilter {
|
|||
.iter()
|
||||
.map(|glob| {
|
||||
let main_separator = regex::escape(MAIN_SEPARATOR_STR);
|
||||
let regex = glob
|
||||
.regex()
|
||||
|
||||
glob.regex()
|
||||
// We are using a custom DFA builder
|
||||
.strip_prefix("(?-u)")
|
||||
.expect("a glob is a non-unicode byte regex")
|
||||
// Match windows paths if applicable
|
||||
.replace('/', &main_separator);
|
||||
regex
|
||||
.replace('/', &main_separator)
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -123,9 +122,9 @@ impl GlobDirFilter {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::glob_dir_filter::GlobDirFilter;
|
||||
use crate::PortableGlobParser;
|
||||
use std::path::{Path, MAIN_SEPARATOR};
|
||||
use crate::glob_dir_filter::GlobDirFilter;
|
||||
use std::path::{MAIN_SEPARATOR, Path};
|
||||
use tempfile::tempdir;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
|
|
|
@ -15,8 +15,8 @@ use uv_pypi_types::{DirectUrl, Metadata10};
|
|||
|
||||
use crate::linker::{LinkMode, Locks};
|
||||
use crate::wheel::{
|
||||
dist_info_metadata, find_dist_info, install_data, parse_scripts, parse_wheel_file,
|
||||
read_record_file, write_installer_metadata, write_script_entrypoints, LibKind,
|
||||
LibKind, dist_info_metadata, find_dist_info, install_data, parse_scripts, parse_wheel_file,
|
||||
read_record_file, write_installer_metadata, write_script_entrypoints,
|
||||
};
|
||||
use crate::{Error, Layout};
|
||||
|
||||
|
|
|
@ -12,8 +12,8 @@ use uv_pypi_types::Scheme;
|
|||
|
||||
pub use install::install_wheel;
|
||||
pub use linker::{LinkMode, Locks};
|
||||
pub use uninstall::{uninstall_egg, uninstall_legacy_editable, uninstall_wheel, Uninstall};
|
||||
pub use wheel::{parse_wheel_file, read_record_file, LibKind};
|
||||
pub use uninstall::{Uninstall, uninstall_egg, uninstall_legacy_editable, uninstall_wheel};
|
||||
pub use wheel::{LibKind, parse_wheel_file, read_record_file};
|
||||
|
||||
mod install;
|
||||
mod linker;
|
||||
|
|
|
@ -4,7 +4,7 @@ use rustc_hash::FxHashSet;
|
|||
use serde::Serialize;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use crate::{wheel, Error};
|
||||
use crate::{Error, wheel};
|
||||
|
||||
/// A script defining the name of the runnable entrypoint and the module and function that should be
|
||||
/// run.
|
||||
|
@ -109,7 +109,7 @@ pub(crate) fn scripts_from_ini(
|
|||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::script::{scripts_from_ini, Script};
|
||||
use crate::script::{Script, scripts_from_ini};
|
||||
|
||||
#[test]
|
||||
fn test_valid_script_names() {
|
||||
|
|
|
@ -6,8 +6,8 @@ use std::sync::{LazyLock, Mutex};
|
|||
use tracing::trace;
|
||||
use uv_fs::write_atomic_sync;
|
||||
|
||||
use crate::wheel::read_record_file;
|
||||
use crate::Error;
|
||||
use crate::wheel::read_record_file;
|
||||
|
||||
/// Uninstall the wheel represented by the given `.dist-info` directory.
|
||||
pub fn uninstall_wheel(dist_info: &Path) -> Result<Uninstall, Error> {
|
||||
|
@ -261,11 +261,7 @@ pub fn uninstall_legacy_editable(egg_link: &Path) -> Result<Uninstall, Error> {
|
|||
.lines()
|
||||
.find_map(|line| {
|
||||
let line = line.trim();
|
||||
if line.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(line)
|
||||
}
|
||||
if line.is_empty() { None } else { Some(line) }
|
||||
})
|
||||
.ok_or_else(|| Error::InvalidEggLink(egg_link.to_path_buf()))?;
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ use tracing::{debug, instrument, trace, warn};
|
|||
use walkdir::WalkDir;
|
||||
|
||||
use uv_cache_info::CacheInfo;
|
||||
use uv_fs::{persist_with_retry_sync, relative_to, Simplified};
|
||||
use uv_fs::{Simplified, persist_with_retry_sync, relative_to};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pypi_types::DirectUrl;
|
||||
use uv_shell::escape_posix_for_single_quotes;
|
||||
|
@ -21,7 +21,7 @@ use uv_trampoline_builder::windows_script_launcher;
|
|||
use uv_warnings::warn_user_once;
|
||||
|
||||
use crate::record::RecordEntry;
|
||||
use crate::script::{scripts_from_ini, Script};
|
||||
use crate::script::{Script, scripts_from_ini};
|
||||
use crate::{Error, Layout};
|
||||
|
||||
/// Wrapper script template function
|
||||
|
@ -210,13 +210,10 @@ pub(crate) fn write_script_entrypoints(
|
|||
|
||||
let entrypoint_relative = pathdiff::diff_paths(&entrypoint_absolute, site_packages)
|
||||
.ok_or_else(|| {
|
||||
Error::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!(
|
||||
"Could not find relative path for: {}",
|
||||
entrypoint_absolute.simplified_display()
|
||||
),
|
||||
))
|
||||
Error::Io(io::Error::other(format!(
|
||||
"Could not find relative path for: {}",
|
||||
entrypoint_absolute.simplified_display()
|
||||
)))
|
||||
})?;
|
||||
|
||||
// Generate the launcher script.
|
||||
|
@ -407,13 +404,10 @@ fn install_script(
|
|||
let script_absolute = layout.scheme.scripts.join(file.file_name());
|
||||
let script_relative =
|
||||
pathdiff::diff_paths(&script_absolute, site_packages).ok_or_else(|| {
|
||||
Error::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!(
|
||||
"Could not find relative path for: {}",
|
||||
script_absolute.simplified_display()
|
||||
),
|
||||
))
|
||||
Error::Io(io::Error::other(format!(
|
||||
"Could not find relative path for: {}",
|
||||
script_absolute.simplified_display()
|
||||
)))
|
||||
})?;
|
||||
|
||||
let path = file.path();
|
||||
|
@ -723,13 +717,10 @@ pub(crate) fn get_relocatable_executable(
|
|||
) -> Result<PathBuf, Error> {
|
||||
Ok(if relocatable {
|
||||
pathdiff::diff_paths(&executable, &layout.scheme.scripts).ok_or_else(|| {
|
||||
Error::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!(
|
||||
"Could not find relative path for: {}",
|
||||
executable.simplified_display()
|
||||
),
|
||||
))
|
||||
Error::Io(io::Error::other(format!(
|
||||
"Could not find relative path for: {}",
|
||||
executable.simplified_display()
|
||||
)))
|
||||
})?
|
||||
} else {
|
||||
executable
|
||||
|
@ -896,12 +887,12 @@ mod test {
|
|||
use assert_fs::prelude::*;
|
||||
use indoc::{formatdoc, indoc};
|
||||
|
||||
use crate::wheel::format_shebang;
|
||||
use crate::Error;
|
||||
use crate::wheel::format_shebang;
|
||||
|
||||
use super::{
|
||||
get_script_executable, parse_email_message_file, parse_wheel_file, read_record_file,
|
||||
write_installer_metadata, RecordEntry, Script,
|
||||
RecordEntry, Script, get_script_executable, parse_email_message_file, parse_wheel_file,
|
||||
read_record_file, write_installer_metadata,
|
||||
};
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -17,8 +17,10 @@ pub struct Installer<'a> {
|
|||
link_mode: LinkMode,
|
||||
cache: Option<&'a Cache>,
|
||||
reporter: Option<Arc<dyn Reporter>>,
|
||||
installer_name: Option<String>,
|
||||
installer_metadata: bool,
|
||||
/// The name of the [`Installer`].
|
||||
name: Option<String>,
|
||||
/// The metadata associated with the [`Installer`].
|
||||
metadata: bool,
|
||||
}
|
||||
|
||||
impl<'a> Installer<'a> {
|
||||
|
@ -29,8 +31,8 @@ impl<'a> Installer<'a> {
|
|||
link_mode: LinkMode::default(),
|
||||
cache: None,
|
||||
reporter: None,
|
||||
installer_name: Some("uv".to_string()),
|
||||
installer_metadata: true,
|
||||
name: Some("uv".to_string()),
|
||||
metadata: true,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -62,7 +64,7 @@ impl<'a> Installer<'a> {
|
|||
#[must_use]
|
||||
pub fn with_installer_name(self, installer_name: Option<String>) -> Self {
|
||||
Self {
|
||||
installer_name,
|
||||
name: installer_name,
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
@ -71,7 +73,7 @@ impl<'a> Installer<'a> {
|
|||
#[must_use]
|
||||
pub fn with_installer_metadata(self, installer_metadata: bool) -> Self {
|
||||
Self {
|
||||
installer_metadata,
|
||||
metadata: installer_metadata,
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
@ -84,8 +86,8 @@ impl<'a> Installer<'a> {
|
|||
cache,
|
||||
link_mode,
|
||||
reporter,
|
||||
installer_name,
|
||||
installer_metadata,
|
||||
name: installer_name,
|
||||
metadata: installer_metadata,
|
||||
} = self;
|
||||
|
||||
if cache.is_some_and(Cache::is_temporary) {
|
||||
|
@ -136,11 +138,11 @@ impl<'a> Installer<'a> {
|
|||
install(
|
||||
wheels,
|
||||
self.venv.interpreter().layout(),
|
||||
self.installer_name,
|
||||
self.name,
|
||||
self.link_mode,
|
||||
self.reporter,
|
||||
self.venv.relocatable(),
|
||||
self.installer_metadata,
|
||||
self.metadata,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
pub use compile::{compile_tree, CompileError};
|
||||
pub use compile::{CompileError, compile_tree};
|
||||
pub use installer::{Installer, Reporter as InstallReporter};
|
||||
pub use plan::{Plan, Planner};
|
||||
pub use preparer::{Error as PrepareError, Preparer, Reporter as PrepareReporter};
|
||||
pub use site_packages::{SatisfiesResult, SitePackages, SitePackagesDiagnostic};
|
||||
pub use uninstall::{uninstall, UninstallError};
|
||||
pub use uninstall::{UninstallError, uninstall};
|
||||
|
||||
mod compile;
|
||||
mod preparer;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
use std::sync::Arc;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
|
@ -18,8 +18,8 @@ use uv_pypi_types::VerbatimParsedUrl;
|
|||
use uv_python::PythonEnvironment;
|
||||
use uv_types::HashStrategy;
|
||||
|
||||
use crate::satisfies::RequirementSatisfaction;
|
||||
use crate::SitePackages;
|
||||
use crate::satisfies::RequirementSatisfaction;
|
||||
|
||||
/// A planner to generate an [`Plan`] based on a set of requirements.
|
||||
#[derive(Debug)]
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::cmp::Reverse;
|
||||
use std::sync::Arc;
|
||||
|
||||
use futures::{stream::FuturesUnordered, FutureExt, Stream, TryFutureExt, TryStreamExt};
|
||||
use futures::{FutureExt, Stream, TryFutureExt, TryStreamExt, stream::FuturesUnordered};
|
||||
use tracing::{debug, instrument};
|
||||
use url::Url;
|
||||
|
||||
|
@ -70,7 +70,7 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> {
|
|||
) -> impl Stream<Item = Result<CachedDist, Error>> + 'stream {
|
||||
distributions
|
||||
.into_iter()
|
||||
.map(|dist| async move {
|
||||
.map(async |dist| {
|
||||
let wheel = self
|
||||
.get_wheel((*dist).clone(), in_flight, resolution)
|
||||
.boxed_local()
|
||||
|
|
|
@ -25,8 +25,7 @@ impl RequirementSatisfaction {
|
|||
pub(crate) fn check(distribution: &InstalledDist, source: &RequirementSource) -> Self {
|
||||
trace!(
|
||||
"Comparing installed with source: {:?} {:?}",
|
||||
distribution,
|
||||
source
|
||||
distribution, source
|
||||
);
|
||||
// Filter out already-installed packages.
|
||||
match source {
|
||||
|
@ -194,8 +193,7 @@ impl RequirementSatisfaction {
|
|||
{
|
||||
trace!(
|
||||
"Path mismatch: {:?} vs. {:?}",
|
||||
requested_path,
|
||||
installed_path,
|
||||
requested_path, installed_path,
|
||||
);
|
||||
return Self::Mismatch;
|
||||
}
|
||||
|
@ -264,8 +262,7 @@ impl RequirementSatisfaction {
|
|||
{
|
||||
trace!(
|
||||
"Path mismatch: {:?} vs. {:?}",
|
||||
requested_path,
|
||||
installed_path,
|
||||
requested_path, installed_path,
|
||||
);
|
||||
return Self::Mismatch;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
[package]
|
||||
name = "uv-macros"
|
||||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
edition = { workspace = true }
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
|
|
@ -2,7 +2,7 @@ mod options_metadata;
|
|||
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, Attribute, DeriveInput, ImplItem, ItemImpl, LitStr};
|
||||
use syn::{Attribute, DeriveInput, ImplItem, ItemImpl, LitStr, parse_macro_input};
|
||||
|
||||
#[proc_macro_derive(OptionsMetadata, attributes(option, doc, option_group))]
|
||||
pub fn derive_options_metadata(input: TokenStream) -> TokenStream {
|
||||
|
@ -38,7 +38,7 @@ fn impl_combine(ast: &DeriveInput) -> TokenStream {
|
|||
}
|
||||
});
|
||||
|
||||
let gen = quote! {
|
||||
let stream = quote! {
|
||||
impl crate::Combine for #name {
|
||||
fn combine(self, other: #name) -> #name {
|
||||
#name {
|
||||
|
@ -47,7 +47,7 @@ fn impl_combine(ast: &DeriveInput) -> TokenStream {
|
|||
}
|
||||
}
|
||||
};
|
||||
gen.into()
|
||||
stream.into()
|
||||
}
|
||||
|
||||
fn get_doc_comment(attrs: &[Attribute]) -> String {
|
||||
|
|
|
@@ -301,15 +301,24 @@ fn parse_field_attributes(attribute: &Attribute) -> syn::Result<FieldAttributes>
     })?;
 
     let Some(default) = default else {
-        return Err(syn::Error::new(attribute.span(), "Mandatory `default` field is missing in `#[option]` attribute. Specify the default using `#[option(default=\"..\")]`."));
+        return Err(syn::Error::new(
+            attribute.span(),
+            "Mandatory `default` field is missing in `#[option]` attribute. Specify the default using `#[option(default=\"..\")]`.",
+        ));
     };
 
     let Some(value_type) = value_type else {
-        return Err(syn::Error::new(attribute.span(), "Mandatory `value_type` field is missing in `#[option]` attribute. Specify the value type using `#[option(value_type=\"..\")]`."));
+        return Err(syn::Error::new(
+            attribute.span(),
+            "Mandatory `value_type` field is missing in `#[option]` attribute. Specify the value type using `#[option(value_type=\"..\")]`.",
+        ));
     };
 
     let Some(example) = example else {
-        return Err(syn::Error::new(attribute.span(), "Mandatory `example` field is missing in `#[option]` attribute. Add an example using `#[option(example=\"..\")]`."));
+        return Err(syn::Error::new(
+            attribute.span(),
+            "Mandatory `example` field is missing in `#[option]` attribute. Add an example using `#[option(example=\"..\")]`.",
+        ));
     };
 
     Ok(FieldAttributes {
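The hunk above only rewraps the error messages, but it shows the usual way a derive macro reports bad input: build a `syn::Error` with a span and return it as `Err`. A hedged sketch of that pattern, assuming `syn` 2.x, `quote`, and `proc-macro2` as dependencies (the derive logic and the `option` attribute here are illustrative, not the uv-macros API):

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::spanned::Spanned;

// Expansion logic for a hypothetical derive that requires `#[option(...)]`.
fn expand(input: &syn::DeriveInput) -> syn::Result<TokenStream> {
    let has_option = input.attrs.iter().any(|attr| attr.path().is_ident("option"));
    if !has_option {
        // `syn::Error::new` ties the message to a span so rustc points at the
        // offending item; the caller turns it into `compile_error!` via
        // `Error::into_compile_error()`.
        return Err(syn::Error::new(
            input.span(),
            "expected an `#[option(...)]` attribute",
        ));
    }

    let name = &input.ident;
    Ok(quote! {
        impl #name {
            pub const HAS_OPTIONS: bool = true;
        }
    })
}

fn main() {
    let ok: syn::DeriveInput = syn::parse_quote! {
        #[option(default = "1")]
        struct Settings;
    };
    assert!(expand(&ok).is_ok());

    let missing: syn::DeriveInput = syn::parse_quote! {
        struct Settings;
    };
    assert!(expand(&missing).is_err());
}
```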
@@ -1,7 +1,7 @@
 [package]
 name = "uv-normalize"
 version = "0.0.1"
-edition = "2021"
+edition = { workspace = true }
 description = "Normalization for distribution, package and extra names."
 
 [lib]
@@ -7,7 +7,7 @@ use serde::{Deserialize, Deserializer, Serialize};
 
 use uv_small_str::SmallString;
 
-use crate::{validate_and_normalize_ref, InvalidNameError};
+use crate::{InvalidNameError, validate_and_normalize_ref};
 
 /// Either the literal "all" or a list of extras
 #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
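Most of the churn in this diff is the same mechanical change as the hunk above: rustfmt's 2024 style edition switches `use` lists to version sorting, which is essentially ASCII order (capitalized names before `snake_case` ones, digit runs compared numerically), whereas the previous style sorted lowercase items first. A self-contained illustration with hypothetical names:

```rust
#[allow(dead_code)]
mod util {
    pub struct NameError;
    pub struct ParsedName;
    pub fn parse_name() {}
    pub fn validate_name() {}
}

// Previous rustfmt style sorted snake_case items before types:
//     use crate::util::{parse_name, validate_name, NameError, ParsedName};
// The 2024 style edition version-sorts, so the capitalized names lead:
#[allow(unused_imports)]
use crate::util::{NameError, ParsedName, parse_name, validate_name};

fn main() {}
```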
@@ -9,7 +9,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use uv_small_str::SmallString;
 
 use crate::{
-    validate_and_normalize_ref, InvalidNameError, InvalidPipGroupError, InvalidPipGroupPathError,
+    InvalidNameError, InvalidPipGroupError, InvalidPipGroupPathError, validate_and_normalize_ref,
 };
 
 /// The normalized name of a dependency group.
@@ -3,7 +3,7 @@ use std::fmt::{Display, Formatter};
 
 pub use dist_info_name::DistInfoName;
 pub use extra_name::{DefaultExtras, ExtraName};
-pub use group_name::{DefaultGroups, GroupName, PipGroupName, DEV_DEPENDENCIES};
+pub use group_name::{DEV_DEPENDENCIES, DefaultGroups, GroupName, PipGroupName};
 pub use package_name::PackageName;
 
 use uv_small_str::SmallString;
@@ -6,7 +6,7 @@ use serde::{Deserialize, Deserializer, Serialize};
 
 use uv_small_str::SmallString;
 
-use crate::{validate_and_normalize_ref, InvalidNameError};
+use crate::{InvalidNameError, validate_and_normalize_ref};
 
 /// The normalized name of a package.
 ///
@@ -51,13 +51,11 @@ impl PackageName {
             owned_string.push('_');
 
             // Iterate over the rest of the string.
-            owned_string.extend(self.0[dash_position + 1..].chars().map(|character| {
-                if character == '-' {
-                    '_'
-                } else {
-                    character
-                }
-            }));
+            owned_string.extend(
+                self.0[dash_position + 1..]
+                    .chars()
+                    .map(|character| if character == '-' { '_' } else { character }),
+            );
 
             Cow::Owned(owned_string)
         } else {
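For context, the code being reformatted above is the usual dash-to-underscore normalization of a package name (only the layout of the `extend`/`map` chain changes). A standalone sketch of the same transformation with a hypothetical helper, not the uv API itself:

```rust
use std::borrow::Cow;

/// Replace `-` with `_`, borrowing when the name needs no change.
fn normalize_dashes(name: &str) -> Cow<'_, str> {
    match name.find('-') {
        // No dash at all: hand back the original slice without allocating.
        None => Cow::Borrowed(name),
        Some(dash_position) => {
            let mut owned = String::with_capacity(name.len());
            owned.push_str(&name[..dash_position]);
            owned.push('_');
            // Same shape as the reformatted chain above.
            owned.extend(
                name[dash_position + 1..]
                    .chars()
                    .map(|character| if character == '-' { '_' } else { character }),
            );
            Cow::Owned(owned)
        }
    }
}

fn main() {
    assert_eq!(normalize_dashes("uv-publish"), "uv_publish");
    assert_eq!(normalize_dashes("flask"), "flask");
}
```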
@@ -25,13 +25,13 @@
 
 #[cfg(feature = "version-ranges")]
 pub use version_ranges::{
-    release_specifier_to_range, release_specifiers_to_ranges, LowerBound, UpperBound,
+    LowerBound, UpperBound, release_specifier_to_range, release_specifiers_to_ranges,
 };
 pub use {
     version::{
-        LocalSegment, LocalVersion, LocalVersionSlice, Operator, OperatorParseError, Prerelease,
-        PrereleaseKind, Version, VersionParseError, VersionPattern, VersionPatternParseError,
-        MIN_VERSION,
+        LocalSegment, LocalVersion, LocalVersionSlice, MIN_VERSION, Operator, OperatorParseError,
+        Prerelease, PrereleaseKind, Version, VersionParseError, VersionPattern,
+        VersionPatternParseError,
     },
     version_specifier::{
         VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
@@ -1,4 +1,4 @@
-use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use std::fmt::Formatter;
 use std::num::NonZero;
 use std::ops::Deref;
@@ -5,9 +5,9 @@ use std::ops::Bound;
 use std::str::FromStr;
 
 use crate::{
-    version, Operator, OperatorParseError, Version, VersionPattern, VersionPatternParseError,
+    Operator, OperatorParseError, Version, VersionPattern, VersionPatternParseError, version,
 };
-use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 #[cfg(feature = "tracing")]
 use tracing::warn;
 
@@ -828,15 +828,21 @@ mod tests {
     fn test_equal() {
         let version = Version::from_str("1.1.post1").unwrap();
 
-        assert!(!VersionSpecifier::from_str("== 1.1")
-            .unwrap()
-            .contains(&version));
-        assert!(VersionSpecifier::from_str("== 1.1.post1")
-            .unwrap()
-            .contains(&version));
-        assert!(VersionSpecifier::from_str("== 1.1.*")
-            .unwrap()
-            .contains(&version));
+        assert!(
+            !VersionSpecifier::from_str("== 1.1")
+                .unwrap()
+                .contains(&version)
+        );
+        assert!(
+            VersionSpecifier::from_str("== 1.1.post1")
+                .unwrap()
+                .contains(&version)
+        );
+        assert!(
+            VersionSpecifier::from_str("== 1.1.*")
+                .unwrap()
+                .contains(&version)
+        );
     }
 
     const VERSIONS_ALL: &[&str] = &[
@@ -1087,12 +1093,16 @@ mod tests {
 
     #[test]
     fn test_arbitrary_equality() {
-        assert!(VersionSpecifier::from_str("=== 1.2a1")
-            .unwrap()
-            .contains(&Version::from_str("1.2a1").unwrap()));
-        assert!(!VersionSpecifier::from_str("=== 1.2a1")
-            .unwrap()
-            .contains(&Version::from_str("1.2a1+local").unwrap()));
+        assert!(
+            VersionSpecifier::from_str("=== 1.2a1")
+                .unwrap()
+                .contains(&Version::from_str("1.2a1").unwrap())
+        );
+        assert!(
+            !VersionSpecifier::from_str("=== 1.2a1")
+                .unwrap()
+                .contains(&Version::from_str("1.2a1+local").unwrap())
+        );
     }
 
     #[test]
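The asserts above (only their wrapping changes) encode PEP 440 matching rules worth spelling out: plain `== 1.1` does not match the post-release `1.1.post1`, while the explicit `== 1.1.post1` and the prefix form `== 1.1.*` do, and arbitrary equality `===` compares the literal version string, so a `+local` suffix breaks the match. A condensed standalone version, assuming a project that depends on the `uv_pep440` crate:

```rust
use std::str::FromStr;

use uv_pep440::{Version, VersionSpecifier};

fn main() {
    let version = Version::from_str("1.1.post1").unwrap();

    // Exact equality ignores post-releases unless they are spelled out...
    assert!(!VersionSpecifier::from_str("== 1.1").unwrap().contains(&version));
    // ...while the explicit post-release and the wildcard both match.
    assert!(VersionSpecifier::from_str("== 1.1.post1").unwrap().contains(&version));
    assert!(VersionSpecifier::from_str("== 1.1.*").unwrap().contains(&version));
}
```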
@@ -21,7 +21,7 @@ use std::fmt::{Debug, Display, Formatter};
 use std::path::Path;
 use std::str::FromStr;
 
-use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use thiserror::Error;
 use url::Url;
 
@@ -42,8 +42,8 @@ pub use uv_normalize::{ExtraName, InvalidNameError, PackageName};
 pub use uv_pep440;
 use uv_pep440::{VersionSpecifier, VersionSpecifiers};
 pub use verbatim_url::{
-    expand_env_vars, looks_like_git_repository, split_scheme, strip_host, Scheme, VerbatimUrl,
-    VerbatimUrlError,
+    Scheme, VerbatimUrl, VerbatimUrlError, expand_env_vars, looks_like_git_repository,
+    split_scheme, strip_host,
 };
 
 mod cursor;
@@ -980,7 +980,7 @@ mod tests {
     use uv_pep440::{Operator, Version, VersionPattern, VersionSpecifier};
 
     use crate::cursor::Cursor;
-    use crate::marker::{parse, MarkerExpression, MarkerTree, MarkerValueVersion};
+    use crate::marker::{MarkerExpression, MarkerTree, MarkerValueVersion, parse};
     use crate::{
         MarkerOperator, MarkerValueString, Requirement, TracingReporter, VerbatimUrl, VersionOrUrl,
     };
@@ -55,12 +55,12 @@ use itertools::{Either, Itertools};
 use rustc_hash::FxHashMap;
 use version_ranges::Ranges;
 
-use uv_pep440::{release_specifier_to_range, Operator, Version, VersionSpecifier};
+use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range};
 
+use crate::marker::MarkerValueExtra;
 use crate::marker::lowering::{
     CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
 };
-use crate::marker::MarkerValueExtra;
 use crate::{
     ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion,
 };
@@ -153,11 +153,7 @@ impl InternerGuard<'_> {
             .entry(node.clone())
             .or_insert_with(|| NodeId::new(self.shared.nodes.push(node), false));
 
-        if flipped {
-            id.not()
-        } else {
-            *id
-        }
+        if flipped { id.not() } else { *id }
     }
 
     /// Returns a decision node for a single marker expression.
@@ -1708,7 +1704,7 @@ impl fmt::Debug for NodeId {
 
 #[cfg(test)]
 mod tests {
-    use super::{NodeId, INTERNER};
+    use super::{INTERNER, NodeId};
     use crate::MarkerExpression;
 
     fn expr(s: &str) -> NodeId {
@@ -6,13 +6,13 @@ use std::str::FromStr;
 
 use arcstr::ArcStr;
 use itertools::Itertools;
-use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use version_ranges::Ranges;
 
 use uv_normalize::ExtraName;
 use uv_pep440::{Version, VersionParseError, VersionSpecifier};
 
-use super::algebra::{Edges, NodeId, Variable, INTERNER};
+use super::algebra::{Edges, INTERNER, NodeId, Variable};
 use super::simplify;
 use crate::cursor::Cursor;
 use crate::marker::lowering::{
@@ -2967,9 +2967,11 @@ mod test {
     #[test]
     fn test_is_false() {
        assert!(m("python_version < '3.10' and python_version >= '3.10'").is_false());
-        assert!(m("(python_version < '3.10' and python_version >= '3.10') \
+        assert!(
+            m("(python_version < '3.10' and python_version >= '3.10') \
              or (python_version < '3.9' and python_version >= '3.9')")
-        .is_false());
+            .is_false()
+        );
 
         assert!(!m("python_version < '3.10'").is_false());
         assert!(!m("python_version < '0'").is_false());
@@ -3226,11 +3228,13 @@ mod test {
             m("os_name == 'Linux'"),
         );
 
-        assert!(m("
+        assert!(
+            m("
             (os_name == 'Linux' and extra == 'foo')
             or (os_name != 'Linux' and extra == 'bar')")
-        .without_extras()
-        .is_true());
+            .without_extras()
+            .is_true()
+        );
 
         assert_eq!(
             m("os_name == 'Linux' and extra != 'foo'").without_extras(),
@@ -3259,11 +3263,13 @@ mod test {
             m("os_name == 'Linux' and extra == 'foo'").only_extras(),
             m("extra == 'foo'"),
         );
-        assert!(m("
+        assert!(
+            m("
             (os_name == 'foo' and extra == 'foo')
             or (os_name == 'bar' and extra != 'foo')")
-        .only_extras()
-        .is_true());
+            .only_extras()
+            .is_true()
+        );
         assert_eq!(
             m("
             (os_name == 'Linux' and extra == 'foo')
@@ -8,9 +8,9 @@ use uv_normalize::ExtraName;
 
 use crate::marker::parse;
 use crate::{
-    expand_env_vars, parse_extras_cursor, split_extras, split_scheme, strip_host, Cursor,
-    MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
-    RequirementOrigin, Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError,
+    Cursor, MarkerEnvironment, MarkerTree, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
+    RequirementOrigin, Scheme, TracingReporter, VerbatimUrl, VerbatimUrlError, expand_env_vars,
+    parse_extras_cursor, split_extras, split_scheme, strip_host,
 };
 
 /// An extension over [`Pep508Url`] that also supports parsing unnamed requirements, namely paths.
@@ -19,7 +19,7 @@ use crate::{
 pub trait UnnamedRequirementUrl: Pep508Url {
     /// Parse a URL from a relative or absolute path.
     fn parse_path(path: impl AsRef<Path>, working_dir: impl AsRef<Path>)
-        -> Result<Self, Self::Err>;
+    -> Result<Self, Self::Err>;
 
     /// Parse a URL from an absolute path.
     fn parse_absolute_path(path: impl AsRef<Path>) -> Result<Self, Self::Err>;
@@ -2,7 +2,7 @@
 name = "uv-performance-memory-allocator"
 version = "0.1.0"
 publish = false
-edition = "2021"
+edition = "2024"
 
 [lib]
 doctest = false
@@ -236,8 +236,8 @@ pub enum ParseLanguageTagError {
 mod tests {
     use std::str::FromStr;
 
-    use crate::language_tag::ParseLanguageTagError;
     use crate::LanguageTag;
+    use crate::language_tag::ParseLanguageTagError;
 
     #[test]
     fn none() {
@@ -7,7 +7,7 @@ use std::{env, fmt, io};
 
 use fs_err::tokio::File;
 use futures::TryStreamExt;
-use glob::{glob, GlobError, PatternError};
+use glob::{GlobError, PatternError, glob};
 use itertools::Itertools;
 use reqwest::header::AUTHORIZATION;
 use reqwest::multipart::Part;
@@ -21,15 +21,15 @@ use thiserror::Error;
 use tokio::io::{AsyncReadExt, BufReader};
 use tokio::sync::Semaphore;
 use tokio_util::io::ReaderStream;
-use tracing::{debug, enabled, trace, warn, Level};
+use tracing::{Level, debug, enabled, trace, warn};
 use trusted_publishing::TrustedPublishingToken;
 use url::Url;
 
 use uv_auth::Credentials;
 use uv_cache::{Cache, Refresh};
 use uv_client::{
-    BaseClient, MetadataFormat, OwnedArchive, RegistryClientBuilder, UvRetryableStrategy,
-    DEFAULT_RETRIES,
+    BaseClient, DEFAULT_RETRIES, MetadataFormat, OwnedArchive, RegistryClientBuilder,
+    UvRetryableStrategy,
 };
 use uv_configuration::{KeyringProviderType, TrustedPublishing};
 use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename};
@@ -243,6 +243,7 @@ impl PublishSendError {
 /// <https://github.com/astral-sh/uv/issues/8030> caused by
 /// <https://github.com/pypa/setuptools/issues/3777> in combination with
 /// <https://github.com/pypi/warehouse/blob/50a58f3081e693a3772c0283050a275e350004bf/warehouse/forklift/legacy.py#L1133-L1155>
+#[allow(clippy::result_large_err)]
 pub fn files_for_publishing(
     paths: Vec<String>,
 ) -> Result<Vec<(PathBuf, String, DistFilename)>, PublishError> {
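The newly added `#[allow(clippy::result_large_err)]` above silences Clippy's lint against functions whose `Result` has a large `Err` variant, since the whole `Result` is passed around by value; the usual alternative is to box the error. A hedged sketch of the trade-off with a made-up error type (not uv's `PublishError`):

```rust
#[derive(Debug)]
struct BigError {
    // A bulky payload makes the Err variant, and thus the whole Result, large.
    _context: [u8; 256],
}

// Option 1: keep the signature and acknowledge the size explicitly.
#[allow(clippy::result_large_err)]
fn parse_unboxed(input: &str) -> Result<u32, BigError> {
    input.parse().map_err(|_| BigError { _context: [0; 256] })
}

// Option 2: box the error so the Err side stays pointer-sized.
fn parse_boxed(input: &str) -> Result<u32, Box<BigError>> {
    input
        .parse()
        .map_err(|_| Box::new(BigError { _context: [0; 256] }))
}

fn main() {
    assert_eq!(parse_unboxed("42").unwrap(), 42);
    assert!(parse_boxed("not a number").is_err());
}
```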
@@ -585,7 +586,7 @@ async fn source_dist_pkg_info(file: &Path) -> Result<Vec<u8>, PublishPrepareError> {
     let mut pkg_infos: Vec<(PathBuf, Vec<u8>)> = archive
         .entries()?
         .map_err(PublishPrepareError::from)
-        .try_filter_map(|mut entry| async move {
+        .try_filter_map(async |mut entry| {
             let path = entry
                 .path()
                 .map_err(PublishPrepareError::from)?
@@ -883,7 +884,7 @@ async fn handle_response(registry: &Url, response: Response) -> Result<(), Publi
 
 #[cfg(test)]
 mod tests {
-    use crate::{build_request, form_metadata, Reporter};
+    use crate::{Reporter, build_request, form_metadata};
     use insta::{assert_debug_snapshot, assert_snapshot};
     use itertools::Itertools;
     use std::path::PathBuf;
@@ -1,8 +1,8 @@
 //! Trusted publishing (via OIDC) with GitHub actions.
 
-use base64::prelude::BASE64_URL_SAFE_NO_PAD;
 use base64::Engine;
-use reqwest::{header, StatusCode};
+use base64::prelude::BASE64_URL_SAFE_NO_PAD;
+use reqwest::{StatusCode, header};
 use reqwest_middleware::ClientWithMiddleware;
 use serde::{Deserialize, Serialize};
 use std::env;
@@ -1,5 +1,5 @@
 use regex::Regex;
-use serde::{de, Deserialize, Deserializer, Serialize};
+use serde::{Deserialize, Deserializer, Serialize, de};
 use std::borrow::Cow;
 use std::str::FromStr;
 use std::sync::LazyLock;
@@ -2,8 +2,8 @@ use serde::Deserialize;
 
 use uv_normalize::PackageName;
 
-use crate::metadata::Headers;
 use crate::MetadataError;
+use crate::metadata::Headers;
 
 /// A subset of the full core metadata specification, including only the
 /// fields that have been consistent across all versions of the specification.
@@ -5,8 +5,8 @@ use std::fmt::Write;
 use std::str;
 use std::str::FromStr;
 
-use crate::metadata::Headers;
 use crate::MetadataError;
+use crate::metadata::Headers;
 
 /// Code Metadata 2.3 as specified in
 /// <https://packaging.python.org/specifications/core-metadata/>.
@@ -11,9 +11,9 @@ use uv_pep440::{Version, VersionSpecifiers};
 use uv_pep508::Requirement;
 
 use crate::lenient_requirement::LenientRequirement;
-use crate::metadata::pyproject_toml::PyProjectToml;
 use crate::metadata::Headers;
-use crate::{metadata, LenientVersionSpecifiers, MetadataError, VerbatimParsedUrl};
+use crate::metadata::pyproject_toml::PyProjectToml;
+use crate::{LenientVersionSpecifiers, MetadataError, VerbatimParsedUrl, metadata};
 
 /// A subset of the full core metadata specification, including only the
 /// fields that are relevant to dependency resolution.
@@ -18,9 +18,9 @@ use uv_pep508::Pep508Error;
 use crate::VerbatimParsedUrl;
 
 pub use build_requires::BuildRequires;
+pub use metadata_resolver::ResolutionMetadata;
 pub use metadata10::Metadata10;
 pub use metadata23::Metadata23;
-pub use metadata_resolver::ResolutionMetadata;
 pub use pyproject_toml::PyProjectToml;
 pub use requires_dist::RequiresDist;
 pub use requires_txt::RequiresTxt;
@@ -1,8 +1,8 @@
 use std::str::FromStr;
 
 use indexmap::IndexMap;
-use serde::de::IntoDeserializer;
 use serde::Deserialize;
+use serde::de::IntoDeserializer;
 
 use uv_normalize::{ExtraName, PackageName};
 use uv_pep440::Version;
@@ -7,7 +7,7 @@ use url::{ParseError, Url};
 use uv_distribution_filename::{DistExtension, ExtensionError};
 use uv_git_types::{GitUrl, GitUrlParseError};
 use uv_pep508::{
-    looks_like_git_repository, Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError,
+    Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError, looks_like_git_repository,
 };
 
 use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind};
@@ -11,11 +11,11 @@ use tracing::{debug, instrument, trace};
 use which::{which, which_all};
 
 use uv_cache::Cache;
-use uv_fs::which::is_executable;
 use uv_fs::Simplified;
+use uv_fs::which::is_executable;
 use uv_pep440::{
-    release_specifiers_to_ranges, LowerBound, Prerelease, UpperBound, Version, VersionSpecifier,
-    VersionSpecifiers,
+    LowerBound, Prerelease, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
+    release_specifiers_to_ranges,
 };
 use uv_static::EnvVars;
 use uv_warnings::warn_user_once;
@@ -30,11 +30,11 @@ use crate::managed::ManagedPythonInstallations;
 use crate::microsoft_store::find_microsoft_store_pythons;
 use crate::virtualenv::Error as VirtualEnvError;
 use crate::virtualenv::{
-    conda_environment_from_env, virtualenv_from_env, virtualenv_from_working_dir,
-    virtualenv_python_executable, CondaEnvironmentKind,
+    CondaEnvironmentKind, conda_environment_from_env, virtualenv_from_env,
+    virtualenv_from_working_dir, virtualenv_python_executable,
 };
 #[cfg(windows)]
-use crate::windows_registry::{registry_pythons, WindowsPython};
+use crate::windows_registry::{WindowsPython, registry_pythons};
 use crate::{BrokenSymlink, Interpreter, PythonVersion};
 
 /// A request to find a Python installation.
@@ -251,8 +251,8 @@ pub enum Error {
 /// - Discovered virtual environment (e.g. `.venv` in a parent directory)
 ///
 /// Notably, "system" environments are excluded. See [`python_executables_from_installed`].
-fn python_executables_from_virtual_environments<'a>(
-) -> impl Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a {
+fn python_executables_from_virtual_environments<'a>()
+-> impl Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a {
     let from_active_environment = iter::once_with(|| {
         virtualenv_from_env()
             .into_iter()
@@ -1252,8 +1252,8 @@ pub(crate) fn is_windows_store_shim(path: &Path) -> bool {
         CreateFileW, FILE_ATTRIBUTE_REPARSE_POINT, FILE_FLAG_BACKUP_SEMANTICS,
         FILE_FLAG_OPEN_REPARSE_POINT, MAXIMUM_REPARSE_DATA_BUFFER_SIZE, OPEN_EXISTING,
     };
-    use windows_sys::Win32::System::Ioctl::FSCTL_GET_REPARSE_POINT;
     use windows_sys::Win32::System::IO::DeviceIoControl;
+    use windows_sys::Win32::System::Ioctl::FSCTL_GET_REPARSE_POINT;
 
     // The path must be absolute.
     if !path.is_absolute() {
@@ -2715,7 +2715,7 @@ fn split_wheel_tag_release_version(version: Version) -> Version {
 mod tests {
     use std::{path::PathBuf, str::FromStr};
 
-    use assert_fs::{prelude::*, TempDir};
+    use assert_fs::{TempDir, prelude::*};
     use target_lexicon::{Aarch64Architecture, Architecture};
     use test_log::test;
     use uv_pep440::{Prerelease, PrereleaseKind, VersionSpecifiers};
@@ -21,13 +21,14 @@ use tokio_util::either::Either;
 use tracing::{debug, instrument};
 use url::Url;
 
-use uv_client::{is_extended_transient_error, BaseClient, WrappedReqwestError};
+use uv_client::{BaseClient, WrappedReqwestError, is_extended_transient_error};
 use uv_distribution_filename::{ExtensionError, SourceDistExtension};
 use uv_extract::hash::Hasher;
-use uv_fs::{rename_with_retry, Simplified};
+use uv_fs::{Simplified, rename_with_retry};
 use uv_pypi_types::{HashAlgorithm, HashDigest};
 use uv_static::EnvVars;
 
+use crate::PythonVariant;
 use crate::implementation::{
     Error as ImplementationError, ImplementationName, LenientImplementationName,
 };
@@ -35,7 +36,6 @@ use crate::installation::PythonInstallationKey;
 use crate::libc::LibcDetectionError;
 use crate::managed::ManagedPythonInstallation;
 use crate::platform::{self, Arch, Libc, Os};
-use crate::PythonVariant;
 use crate::{Interpreter, PythonRequest, PythonVersion, VersionRequest};
 
 #[derive(Error, Debug)]
@@ -88,9 +88,7 @@ pub enum Error {
     InvalidRequestPlatform(#[from] platform::Error),
     #[error("No download found for request: {}", _0.green())]
     NoDownloadFound(PythonDownloadRequest),
-    #[error(
-        "A mirror was provided via `{0}`, but the URL does not match the expected format: {0}"
-    )]
+    #[error("A mirror was provided via `{0}`, but the URL does not match the expected format: {0}")]
     Mirror(&'static str, &'static str),
     #[error(transparent)]
     LibcDetection(#[from] LibcDetectionError),
@@ -1183,7 +1181,7 @@ async fn read_url(
     let size = response.content_length();
     let stream = response
         .bytes_stream()
-        .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
+        .map_err(io::Error::other)
         .into_async_read();
 
     Ok((Either::Right(stream.compat()), size))
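`io::Error::other(e)`, stable since Rust 1.74, is shorthand for `io::Error::new(io::ErrorKind::Other, e)`, and because it is a plain function it can be passed to `map_err` directly, which is exactly the simplification in the hunk above. A minimal standalone sketch with a stand-in error type:

```rust
use std::io;

// A fallible operation with a non-I/O error type, standing in for the
// stream error mapped in the hunk above.
fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
    s.parse()
}

fn port_as_io_result(s: &str) -> io::Result<u16> {
    // Before: .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
    // After: `io::Error::other` accepts anything convertible into a boxed
    // error, so it slots into `map_err` point-free.
    parse_port(s).map_err(io::Error::other)
}

fn main() {
    assert_eq!(port_as_io_result("8080").unwrap(), 8080);
    assert_eq!(
        port_as_io_result("nope").unwrap_err().kind(),
        io::ErrorKind::Other
    );
}
```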
@@ -14,7 +14,7 @@ use uv_pep440::Version;
 
 use crate::discovery::find_python_installation;
 use crate::installation::PythonInstallation;
-use crate::virtualenv::{virtualenv_python_executable, PyVenvConfiguration};
+use crate::virtualenv::{PyVenvConfiguration, virtualenv_python_executable};
 use crate::{
     EnvironmentPreference, Error, Interpreter, Prefix, PythonNotFound, PythonPreference,
     PythonRequest, Target,
Some files were not shown because too many files have changed in this diff.