Use boxed slices for hash vector (#11714)

## Summary

We never resize these, and they're stored everywhere (on `File`, etc.). Seems useful to use a more efficient structure for them.
parent d4a0096c14
commit 21f4b0863d

23 changed files with 264 additions and 139 deletions
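For context, a self-contained sketch (with a stand-in `Digest` type, not uv's real `HashDigest`) of the layout difference this change trades on: `Vec<T>` is pointer + length + capacity, while `Box<[T]>` is just pointer + length, so every struct embedding a hash collection shrinks by one word, and the heap allocation carries no unused capacity.

```rust
// Stand-in type for illustration; uv's real `HashDigest` lives in uv_pypi_types.
struct Digest {
    algorithm: u8,
    value: String,
}

fn main() {
    let word = std::mem::size_of::<usize>();
    // `Vec<T>` stores (pointer, length, capacity): three words.
    assert_eq!(std::mem::size_of::<Vec<Digest>>(), 3 * word);
    // `Box<[T]>` is a fat pointer (pointer, length): two words, and the
    // backing allocation fits the contents exactly.
    assert_eq!(std::mem::size_of::<Box<[Digest]>>(), 2 * word);
    println!("Vec: {} bytes, Box<[T]>: {} bytes", 3 * word, 2 * word);
}
```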
@@ -9,6 +9,7 @@ use uv_cache::{Cache, CacheBucket};
 use uv_cache_key::cache_digest;
 use uv_distribution_filename::DistFilename;
 use uv_distribution_types::{File, FileLocation, IndexUrl, UrlString};
+use uv_pypi_types::HashDigests;
 
 use crate::cached_client::{CacheControl, CachedClientError};
 use crate::html::SimpleHtml;
@@ -283,7 +284,7 @@ impl<'a> FlatIndexClient<'a> {
             let file = File {
                 dist_info_metadata: false,
                 filename: filename.to_string(),
-                hashes: Vec::new(),
+                hashes: HashDigests::empty(),
                 requires_python: None,
                 size: None,
                 upload_time_utc_ms: None,

@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
 use uv_cache_info::CacheInfo;
 use uv_distribution_filename::WheelFilename;
 use uv_normalize::PackageName;
-use uv_pypi_types::{HashDigest, VerbatimParsedUrl};
+use uv_pypi_types::{HashDigest, HashDigests, VerbatimParsedUrl};
 
 use crate::{
     BuiltDist, Dist, DistributionMetadata, Hashed, InstalledMetadata, InstalledVersion, Name,
@@ -23,7 +23,7 @@ pub enum CachedDist {
 pub struct CachedRegistryDist {
     pub filename: WheelFilename,
     pub path: PathBuf,
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
     pub cache_info: CacheInfo,
 }
 
@@ -32,7 +32,7 @@ pub struct CachedDirectUrlDist {
     pub filename: WheelFilename,
     pub url: VerbatimParsedUrl,
     pub path: PathBuf,
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
     pub cache_info: CacheInfo,
 }
 
@@ -41,7 +41,7 @@ impl CachedDist {
     pub fn from_remote(
         remote: Dist,
         filename: WheelFilename,
-        hashes: Vec<HashDigest>,
+        hashes: HashDigests,
         cache_info: CacheInfo,
         path: PathBuf,
     ) -> Self {
@@ -156,7 +156,7 @@ impl CachedDist {
 
 impl Hashed for CachedRegistryDist {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }
 

@@ -7,7 +7,7 @@ use url::Url;
 
 use uv_pep440::{VersionSpecifiers, VersionSpecifiersParseError};
 use uv_pep508::split_scheme;
-use uv_pypi_types::{CoreMetadata, HashDigest, Yanked};
+use uv_pypi_types::{CoreMetadata, HashDigests, Yanked};
 
 /// Error converting [`uv_pypi_types::File`] to [`distribution_type::File`].
 #[derive(Debug, thiserror::Error)]
@@ -24,7 +24,7 @@ pub enum FileConversionError {
 pub struct File {
     pub dist_info_metadata: bool,
     pub filename: String,
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
     pub requires_python: Option<VersionSpecifiers>,
     pub size: Option<u64>,
     // N.B. We don't use a Jiff timestamp here because it's a little
@@ -47,7 +47,7 @@ impl File {
                 .or(file.data_dist_info_metadata.as_ref())
                 .is_some_and(CoreMetadata::is_available),
             filename: file.filename,
-            hashes: file.hashes.into_digests(),
+            hashes: HashDigests::from(file.hashes),
             requires_python: file
                 .requires_python
                 .transpose()

@@ -1127,19 +1127,19 @@ impl Identifier for Url {
 
 impl Identifier for File {
     fn distribution_id(&self) -> DistributionId {
-        if let Some(hash) = self.hashes.first() {
-            DistributionId::Digest(hash.clone())
-        } else {
-            self.url.distribution_id()
-        }
+        self.hashes
+            .first()
+            .cloned()
+            .map(DistributionId::Digest)
+            .unwrap_or_else(|| self.url.distribution_id())
     }
 
     fn resource_id(&self) -> ResourceId {
-        if let Some(hash) = self.hashes.first() {
-            ResourceId::Digest(hash.clone())
-        } else {
-            self.url.resource_id()
-        }
+        self.hashes
+            .first()
+            .cloned()
+            .map(ResourceId::Digest)
+            .unwrap_or_else(|| self.url.resource_id())
     }
 }
 

@@ -1,7 +1,7 @@
 use uv_distribution_filename::DistExtension;
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep508::MarkerTree;
-use uv_pypi_types::{HashDigest, RequirementSource};
+use uv_pypi_types::{HashDigest, HashDigests, RequirementSource};
 
 use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist};
 
@@ -183,7 +183,7 @@ pub enum Node {
     Root,
     Dist {
         dist: ResolvedDist,
-        hashes: Vec<HashDigest>,
+        hashes: HashDigests,
         install: bool,
     },
 }

@@ -1,6 +1,6 @@
 use uv_cache::{ArchiveId, Cache, ARCHIVE_VERSION};
 use uv_distribution_types::Hashed;
-use uv_pypi_types::HashDigest;
+use uv_pypi_types::{HashDigest, HashDigests};
 
 /// An archive (unzipped wheel) that exists in the local cache.
 #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
@@ -8,14 +8,14 @@ pub struct Archive {
     /// The unique ID of the entry in the wheel's archive bucket.
     pub id: ArchiveId,
     /// The computed hashes of the archive.
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
     /// The version of the archive bucket.
     pub version: u8,
 }
 
 impl Archive {
     /// Create a new [`Archive`] with the given ID and hashes.
-    pub(crate) fn new(id: ArchiveId, hashes: Vec<HashDigest>) -> Self {
+    pub(crate) fn new(id: ArchiveId, hashes: HashDigests) -> Self {
         Self {
             id,
             hashes,
@@ -31,6 +31,6 @@ impl Archive {
 
 impl Hashed for Archive {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }

@@ -26,7 +26,7 @@ use uv_distribution_types::{
 use uv_extract::hash::Hasher;
 use uv_fs::write_atomic;
 use uv_platform_tags::Tags;
-use uv_pypi_types::HashDigest;
+use uv_pypi_types::{HashDigest, HashDigests};
 use uv_types::{BuildContext, BuildStack};
 
 use crate::archive::Archive;
@@ -719,7 +719,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             })
             .await??;
 
-            vec![]
+            HashDigests::empty()
         } else {
             // Create a hasher for each hash algorithm.
             let algorithms = hashes.algorithms();
@@ -843,7 +843,10 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             })
         } else if hashes.is_none() {
             // Otherwise, unzip the wheel.
-            let archive = Archive::new(self.unzip_wheel(path, wheel_entry.path()).await?, vec![]);
+            let archive = Archive::new(
+                self.unzip_wheel(path, wheel_entry.path()).await?,
+                HashDigests::empty(),
+            );
 
             // Write the archive pointer to the cache.
             let pointer = LocalArchivePointer {

@@ -4,7 +4,7 @@ use crate::Error;
 use uv_distribution_filename::WheelFilename;
 use uv_distribution_types::{CachedDist, Dist, Hashed};
 use uv_metadata::read_flat_wheel_metadata;
-use uv_pypi_types::{HashDigest, ResolutionMetadata};
+use uv_pypi_types::{HashDigest, HashDigests, ResolutionMetadata};
 
 use uv_cache_info::CacheInfo;
 
@@ -21,7 +21,7 @@ pub struct LocalWheel {
     /// The cache index of the wheel.
     pub(crate) cache: CacheInfo,
     /// The computed hashes of the wheel.
-    pub(crate) hashes: Vec<HashDigest>,
+    pub(crate) hashes: HashDigests,
 }
 
 impl LocalWheel {
@@ -49,7 +49,7 @@ impl LocalWheel {
 
 impl Hashed for LocalWheel {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }
 

@@ -7,7 +7,7 @@ use uv_distribution_types::{
     CachedDirectUrlDist, CachedRegistryDist, DirectUrlSourceDist, DirectorySourceDist,
     GitSourceDist, Hashed, PathSourceDist,
 };
-use uv_pypi_types::{HashDigest, VerbatimParsedUrl};
+use uv_pypi_types::{HashDigest, HashDigests, VerbatimParsedUrl};
 
 use crate::archive::Archive;
 use crate::{HttpArchivePointer, LocalArchivePointer};
@@ -19,7 +19,7 @@ pub struct CachedWheel {
     /// The [`CacheEntry`] for the wheel.
     pub entry: CacheEntry,
     /// The [`HashDigest`]s for the wheel.
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
     /// The [`CacheInfo`] for the wheel.
     pub cache_info: CacheInfo,
 }
@@ -36,7 +36,7 @@ impl CachedWheel {
         // Convert to a cached wheel.
         let archive = cache.resolve_link(path).ok()?;
         let entry = CacheEntry::from_path(archive);
-        let hashes = Vec::new();
+        let hashes = HashDigests::empty();
        let cache_info = CacheInfo::default();
         Some(Self {
             filename,
@@ -186,6 +186,6 @@ impl CachedWheel {
 
 impl Hashed for CachedWheel {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }

@@ -7,7 +7,7 @@ use uv_configuration::SourceStrategy;
 use uv_distribution_types::{GitSourceUrl, IndexLocations};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::{Version, VersionSpecifiers};
-use uv_pypi_types::{HashDigest, ResolutionMetadata};
+use uv_pypi_types::{HashDigests, ResolutionMetadata};
 use uv_workspace::dependency_groups::DependencyGroupError;
 use uv_workspace::WorkspaceError;
 
@@ -122,7 +122,7 @@ pub struct ArchiveMetadata {
     /// The [`Metadata`] for the underlying distribution.
     pub metadata: Metadata,
     /// The hashes of the source or built archive.
-    pub hashes: Vec<HashDigest>,
+    pub hashes: HashDigests,
 }
 
 impl ArchiveMetadata {
@@ -131,12 +131,12 @@ impl ArchiveMetadata {
     pub fn from_metadata23(metadata: ResolutionMetadata) -> Self {
         Self {
             metadata: Metadata::from_metadata23(metadata),
-            hashes: vec![],
+            hashes: HashDigests::empty(),
         }
     }
 
     /// Create an [`ArchiveMetadata`] with the given metadata and hashes.
-    pub fn with_hashes(metadata: Metadata, hashes: Vec<HashDigest>) -> Self {
+    pub fn with_hashes(metadata: Metadata, hashes: HashDigests) -> Self {
         Self { metadata, hashes }
     }
 }
@@ -145,7 +145,7 @@ impl From<Metadata> for ArchiveMetadata {
     fn from(metadata: Metadata) -> Self {
         Self {
             metadata,
-            hashes: vec![],
+            hashes: HashDigests::empty(),
         }
     }
 }

@@ -9,7 +9,7 @@ use uv_fs::files;
 use uv_normalize::PackageName;
 use uv_pep440::Version;
 use uv_platform_tags::Tags;
-use uv_pypi_types::HashDigest;
+use uv_pypi_types::{HashDigest, HashDigests};
 
 /// The information about the wheel we either just built or got from the cache.
 #[derive(Debug, Clone)]
@@ -21,7 +21,7 @@ pub(crate) struct BuiltWheelMetadata {
     /// The parsed filename.
     pub(crate) filename: WheelFilename,
     /// The computed hashes of the source distribution from which the wheel was built.
-    pub(crate) hashes: Vec<HashDigest>,
+    pub(crate) hashes: HashDigests,
     /// The cache information for the underlying source distribution.
     pub(crate) cache_info: CacheInfo,
 }
@@ -49,12 +49,12 @@ impl BuiltWheelMetadata {
             path,
             filename,
             cache_info: CacheInfo::default(),
-            hashes: vec![],
+            hashes: HashDigests::empty(),
         })
     }
 
     #[must_use]
-    pub(crate) fn with_hashes(mut self, hashes: Vec<HashDigest>) -> Self {
+    pub(crate) fn with_hashes(mut self, hashes: HashDigests) -> Self {
         self.hashes = hashes;
         self
     }
@@ -68,6 +68,6 @@ impl BuiltWheelMetadata {
 
 impl Hashed for BuiltWheelMetadata {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }

@@ -41,7 +41,7 @@ use uv_metadata::read_archive_metadata;
 use uv_normalize::PackageName;
 use uv_pep440::{release_specifiers_to_ranges, Version};
 use uv_platform_tags::Tags;
-use uv_pypi_types::{HashAlgorithm, HashDigest, PyProjectToml, ResolutionMetadata};
+use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata};
 use uv_types::{BuildContext, BuildStack, SourceBuildTrait};
 use uv_workspace::pyproject::ToolUvSources;
 
@@ -708,7 +708,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .download_archive(response, source, ext, entry.path(), &algorithms)
                 .await?;
 
-            Ok(revision.with_hashes(hashes))
+            Ok(revision.with_hashes(HashDigests::from(hashes)))
         }
         .boxed_local()
         .instrument(info_span!("download", source_dist = %source))
@@ -1037,7 +1037,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
            .await?;
 
         // Include the hashes and cache info in the revision.
-        let revision = revision.with_hashes(hashes);
+        let revision = revision.with_hashes(HashDigests::from(hashes));
 
         // Persist the revision.
         let pointer = LocalRevisionPointer {
@@ -1509,7 +1509,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             path: cache_shard.join(&disk_filename),
             target: cache_shard.join(filename.stem()),
             filename,
-            hashes: vec![],
+            hashes: HashDigests::empty(),
             cache_info: CacheInfo::default(),
         })
     }
@@ -1561,7 +1561,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     debug!("Found static metadata via GitHub fast path for: {source}");
                     return Ok(ArchiveMetadata {
                         metadata: Metadata::from_metadata23(metadata),
-                        hashes: vec![],
+                        hashes: HashDigests::empty(),
                     });
                 }
                 Err(err) => {
@@ -1974,7 +1974,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     return Err(Error::CacheHeal(source.to_string(), existing.algorithm()));
                 }
             }
-            Ok(revision.with_hashes(hashes))
+            Ok(revision.with_hashes(HashDigests::from(hashes)))
         }
 
     /// Heal a [`Revision`] for a remote archive.
@@ -2011,7 +2011,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     return Err(Error::CacheHeal(source.to_string(), existing.algorithm()));
                 }
             }
-            Ok(revision.clone().with_hashes(hashes))
+            Ok(revision.clone().with_hashes(HashDigests::from(hashes)))
         }
         .boxed_local()
         .instrument(info_span!("download", source_dist = %source))

@@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
 use std::path::Path;
 use uv_distribution_types::Hashed;
 
-use uv_pypi_types::HashDigest;
+use uv_pypi_types::{HashDigest, HashDigests};
 
 /// The [`Revision`] is a thin wrapper around a unique identifier for the source distribution.
 ///
@@ -13,7 +13,7 @@ use uv_pypi_types::HashDigest;
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub(crate) struct Revision {
     id: RevisionId,
-    hashes: Vec<HashDigest>,
+    hashes: HashDigests,
 }
 
 impl Revision {
@@ -21,7 +21,7 @@ impl Revision {
     pub(crate) fn new() -> Self {
         Self {
             id: RevisionId::new(),
-            hashes: vec![],
+            hashes: HashDigests::empty(),
         }
     }
 
@@ -32,17 +32,17 @@ impl Revision {
 
     /// Return the computed hashes of the archive.
     pub(crate) fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 
     /// Return the computed hashes of the archive.
-    pub(crate) fn into_hashes(self) -> Vec<HashDigest> {
+    pub(crate) fn into_hashes(self) -> HashDigests {
         self.hashes
     }
 
     /// Set the computed hashes of the archive.
     #[must_use]
-    pub(crate) fn with_hashes(mut self, hashes: Vec<HashDigest>) -> Self {
+    pub(crate) fn with_hashes(mut self, hashes: HashDigests) -> Self {
         self.hashes = hashes;
         self
     }
@@ -50,7 +50,7 @@ impl Revision {
 
 impl Hashed for Revision {
     fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }
 

@@ -143,36 +143,6 @@ pub struct Hashes {
 }
 
 impl Hashes {
-    /// Convert a set of [`Hashes`] into a list of [`HashDigest`]s.
-    pub fn into_digests(self) -> Vec<HashDigest> {
-        let mut digests = Vec::new();
-        if let Some(sha512) = self.sha512 {
-            digests.push(HashDigest {
-                algorithm: HashAlgorithm::Sha512,
-                digest: sha512,
-            });
-        }
-        if let Some(sha384) = self.sha384 {
-            digests.push(HashDigest {
-                algorithm: HashAlgorithm::Sha384,
-                digest: sha384,
-            });
-        }
-        if let Some(sha256) = self.sha256 {
-            digests.push(HashDigest {
-                algorithm: HashAlgorithm::Sha256,
-                digest: sha256,
-            });
-        }
-        if let Some(md5) = self.md5 {
-            digests.push(HashDigest {
-                algorithm: HashAlgorithm::Md5,
-                digest: md5,
-            });
-        }
-        digests
-    }
-
     /// Parse the hash from a fragment, as in: `sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`
     pub fn parse_fragment(fragment: &str) -> Result<Self, HashError> {
         let mut parts = fragment.split('=');
@@ -403,6 +373,131 @@ impl FromStr for HashDigest {
     }
 }
 
+/// A collection of [`HashDigest`] entities.
+#[derive(
+    Debug,
+    Clone,
+    Ord,
+    PartialOrd,
+    Eq,
+    PartialEq,
+    Hash,
+    Serialize,
+    Deserialize,
+    rkyv::Archive,
+    rkyv::Deserialize,
+    rkyv::Serialize,
+)]
+#[rkyv(derive(Debug))]
+pub struct HashDigests(Box<[HashDigest]>);
+
+impl HashDigests {
+    /// Initialize an empty collection of [`HashDigest`] entities.
+    pub fn empty() -> Self {
+        Self(Box::new([]))
+    }
+
+    /// Return the [`HashDigest`] entities as a slice.
+    pub fn as_slice(&self) -> &[HashDigest] {
+        self.0.as_ref()
+    }
+
+    /// Returns `true` if the [`HashDigests`] are empty.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Returns the first [`HashDigest`] entity.
+    pub fn first(&self) -> Option<&HashDigest> {
+        self.0.first()
+    }
+
+    /// Return the [`HashDigest`] entities as a vector.
+    pub fn to_vec(&self) -> Vec<HashDigest> {
+        self.0.to_vec()
+    }
+
+    /// Returns an [`Iterator`] over the [`HashDigest`] entities.
+    pub fn iter(&self) -> impl Iterator<Item = &HashDigest> {
+        self.0.iter()
+    }
+
+    /// Sort the underlying [`HashDigest`] entities.
+    pub fn sort_unstable(&mut self) {
+        self.0.sort_unstable();
+    }
+}
+
+/// Convert a set of [`Hashes`] into a list of [`HashDigest`]s.
+impl From<Hashes> for HashDigests {
+    fn from(value: Hashes) -> Self {
+        let mut digests = Vec::with_capacity(
+            usize::from(value.sha512.is_some())
+                + usize::from(value.sha384.is_some())
+                + usize::from(value.sha256.is_some())
+                + usize::from(value.md5.is_some()),
+        );
+        if let Some(sha512) = value.sha512 {
+            digests.push(HashDigest {
+                algorithm: HashAlgorithm::Sha512,
+                digest: sha512,
+            });
+        }
+        if let Some(sha384) = value.sha384 {
+            digests.push(HashDigest {
+                algorithm: HashAlgorithm::Sha384,
+                digest: sha384,
+            });
+        }
+        if let Some(sha256) = value.sha256 {
+            digests.push(HashDigest {
+                algorithm: HashAlgorithm::Sha256,
+                digest: sha256,
+            });
+        }
+        if let Some(md5) = value.md5 {
+            digests.push(HashDigest {
+                algorithm: HashAlgorithm::Md5,
+                digest: md5,
+            });
+        }
+        Self::from(digests)
+    }
+}
+
+impl From<HashDigest> for HashDigests {
+    fn from(value: HashDigest) -> Self {
+        Self(Box::new([value]))
+    }
+}
+
+impl From<&[HashDigest]> for HashDigests {
+    fn from(value: &[HashDigest]) -> Self {
+        Self(Box::from(value))
+    }
+}
+
+impl From<Vec<HashDigest>> for HashDigests {
+    fn from(value: Vec<HashDigest>) -> Self {
+        Self(value.into_boxed_slice())
+    }
+}
+
+impl FromIterator<HashDigest> for HashDigests {
+    fn from_iter<T: IntoIterator<Item = HashDigest>>(iter: T) -> Self {
+        Self(iter.into_iter().collect())
+    }
+}
+
+impl IntoIterator for HashDigests {
+    type Item = HashDigest;
+    type IntoIter = std::vec::IntoIter<HashDigest>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_vec().into_iter()
+    }
+}
+
 #[derive(thiserror::Error, Debug)]
 pub enum HashError {
     #[error("Unexpected hash (expected `<algorithm>:<hash>`): {0}")]
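A brief usage sketch of the new `HashDigests` type defined above. It assumes the `FromStr` impl referenced in the hunk header parses `<algorithm>:<hash>` strings, as the `HashError` message suggests; the sha256 value is the placeholder from the `parse_fragment` doc comment, and the md5 value is an arbitrary placeholder.

```rust
use uv_pypi_types::{HashDigest, HashDigests};

fn demo() -> Result<(), Box<dyn std::error::Error>> {
    // Parse digests via `FromStr` (assumed format: `<algorithm>:<hash>`).
    let sha256: HashDigest =
        "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61".parse()?;
    let md5: HashDigest = "md5:d41d8cd98f00b204e9800998ecf8427e".parse()?;

    // `From<Vec<HashDigest>>` boxes the vector; since `vec![..]` allocates
    // exactly to length, `into_boxed_slice` does not reallocate here.
    let digests = HashDigests::from(vec![sha256, md5]);
    assert!(!digests.is_empty());

    // Read access goes through `as_slice`, matching the `Hashed` impls above.
    assert_eq!(digests.as_slice().first(), digests.first());
    Ok(())
}
```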
@@ -74,8 +74,13 @@ impl FlatIndex {
             DistFilename::WheelFilename(filename) => {
                 let version = filename.version.clone();
 
-                let compatibility =
-                    Self::wheel_compatibility(&filename, &file.hashes, tags, hasher, build_options);
+                let compatibility = Self::wheel_compatibility(
+                    &filename,
+                    file.hashes.as_slice(),
+                    tags,
+                    hasher,
+                    build_options,
+                );
                 let dist = RegistryBuiltWheel {
                     filename,
                     file: Box::new(file),
@@ -91,8 +96,12 @@ impl FlatIndex {
                 }
             }
             DistFilename::SourceDistFilename(filename) => {
-                let compatibility =
-                    Self::source_dist_compatibility(&filename, &file.hashes, hasher, build_options);
+                let compatibility = Self::source_dist_compatibility(
+                    &filename,
+                    file.hashes.as_slice(),
+                    hasher,
+                    build_options,
+                );
                 let dist = RegistrySourceDist {
                     name: filename.name.clone(),
                     version: filename.version.clone(),

@@ -40,8 +40,8 @@ use uv_platform_tags::{
     AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagCompatibility, TagPriority, Tags,
 };
 use uv_pypi_types::{
-    redact_credentials, ConflictPackage, Conflicts, HashDigest, ParsedArchiveUrl, ParsedGitUrl,
-    Requirement, RequirementSource,
+    redact_credentials, ConflictPackage, Conflicts, HashDigest, HashDigests, ParsedArchiveUrl,
+    ParsedGitUrl, Requirement, RequirementSource,
 };
 use uv_types::{BuildContext, HashStrategy};
 use uv_workspace::WorkspaceMember;
@@ -2396,10 +2396,9 @@ impl Package {
         let file = Box::new(uv_distribution_types::File {
             dist_info_metadata: false,
             filename: filename.to_string(),
-            hashes: sdist
-                .hash()
-                .map(|hash| vec![hash.0.clone()])
-                .unwrap_or_default(),
+            hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
+                HashDigests::from(hash.0.clone())
+            }),
             requires_python: None,
             size: sdist.size(),
             upload_time_utc_ms: None,
@@ -2448,10 +2447,9 @@ impl Package {
         let file = Box::new(uv_distribution_types::File {
             dist_info_metadata: false,
             filename: filename.to_string(),
-            hashes: sdist
-                .hash()
-                .map(|hash| vec![hash.0.clone()])
-                .unwrap_or_default(),
+            hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
+                HashDigests::from(hash.0.clone())
+            }),
             requires_python: None,
             size: sdist.size(),
             upload_time_utc_ms: None,
@@ -2725,8 +2723,15 @@ impl Package {
     }
 
     /// Returns all the hashes associated with this [`Package`].
-    fn hashes(&self) -> Vec<HashDigest> {
-        let mut hashes = Vec::new();
+    fn hashes(&self) -> HashDigests {
+        let mut hashes = Vec::with_capacity(
+            usize::from(self.sdist.as_ref().and_then(|sdist| sdist.hash()).is_some())
+                + self
+                    .wheels
+                    .iter()
+                    .map(|wheel| usize::from(wheel.hash.is_some()))
+                    .sum::<usize>(),
+        );
         if let Some(ref sdist) = self.sdist {
             if let Some(hash) = sdist.hash() {
                 hashes.push(hash.0.clone());
@@ -2735,7 +2740,7 @@ impl Package {
         for wheel in &self.wheels {
             hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
         }
-        hashes
+        HashDigests::from(hashes)
     }
 
     /// Returns the [`ResolvedRepositoryReference`] for the package, if it is a Git source.
@@ -3579,9 +3584,12 @@ impl SourceDist {
         match annotated_dist.dist {
             // We pass empty installed packages for locking.
             ResolvedDist::Installed { .. } => unreachable!(),
-            ResolvedDist::Installable { ref dist, .. } => {
-                SourceDist::from_dist(id, dist, &annotated_dist.hashes, annotated_dist.index())
-            }
+            ResolvedDist::Installable { ref dist, .. } => SourceDist::from_dist(
+                id,
+                dist,
+                annotated_dist.hashes.as_slice(),
+                annotated_dist.index(),
+            ),
         }
     }
 
@@ -3877,9 +3885,11 @@ impl Wheel {
         match annotated_dist.dist {
             // We pass empty installed packages for locking.
             ResolvedDist::Installed { .. } => unreachable!(),
-            ResolvedDist::Installable { ref dist, .. } => {
-                Wheel::from_dist(dist, &annotated_dist.hashes, annotated_dist.index())
-            }
+            ResolvedDist::Installable { ref dist, .. } => Wheel::from_dist(
+                dist,
+                annotated_dist.hashes.as_slice(),
+                annotated_dist.index(),
+            ),
         }
     }
 

@@ -576,7 +576,7 @@ impl std::fmt::Display for RequirementsTxtExport<'_> {
             let mut hashes = package.hashes();
             hashes.sort_unstable();
             if !hashes.is_empty() {
-                for hash in &hashes {
+                for hash in hashes.iter() {
                     writeln!(f, " \\")?;
                     write!(f, " --hash=")?;
                     write!(f, "{hash}")?;

@@ -8,7 +8,7 @@ use uv_distribution_types::IndexUrl;
 use uv_normalize::PackageName;
 use uv_pep440::{Operator, Version};
 use uv_pep508::{MarkerTree, VersionOrUrl};
-use uv_pypi_types::{HashDigest, HashError};
+use uv_pypi_types::{HashDigest, HashDigests, HashError};
 use uv_requirements_txt::{RequirementEntry, RequirementsTxtRequirement};
 
 use crate::universal_marker::UniversalMarker;
@@ -32,7 +32,7 @@ pub struct Preference {
     /// If coming from a package with diverging versions, the markers of the forks this preference
     /// is part of, otherwise `None`.
     fork_markers: Vec<UniversalMarker>,
-    hashes: Vec<HashDigest>,
+    hashes: HashDigests,
 }
 
 impl Preference {
@@ -89,7 +89,7 @@ impl Preference {
             marker: MarkerTree::TRUE,
             index: PreferenceIndex::from(package.index(install_path)?),
             fork_markers: package.fork_markers().to_vec(),
-            hashes: Vec::new(),
+            hashes: HashDigests::empty(),
         }))
     }
 
@@ -293,7 +293,7 @@ impl std::fmt::Display for Preference {
 #[derive(Debug, Clone)]
 pub(crate) struct Pin {
     version: Version,
-    hashes: Vec<HashDigest>,
+    hashes: HashDigests,
 }
 
 impl Pin {
@@ -304,7 +304,7 @@ impl Pin {
 
     /// Return the hashes of the pinned package.
     pub(crate) fn hashes(&self) -> &[HashDigest] {
-        &self.hashes
+        self.hashes.as_slice()
     }
 }
 
@@ -312,7 +312,7 @@ impl From<Version> for Pin {
     fn from(version: Version) -> Self {
         Self {
             version,
-            hashes: Vec::new(),
+            hashes: HashDigests::empty(),
         }
     }
 }

@@ -7,7 +7,7 @@ use uv_distribution_types::{
 };
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::Version;
-use uv_pypi_types::HashDigest;
+use uv_pypi_types::HashDigests;
 
 pub use crate::resolution::display::{AnnotationStyle, DisplayResolutionGraph};
 pub(crate) use crate::resolution::output::ResolutionGraphNode;
@@ -29,7 +29,7 @@ pub(crate) struct AnnotatedDist {
     pub(crate) version: Version,
     pub(crate) extra: Option<ExtraName>,
     pub(crate) dev: Option<GroupName>,
-    pub(crate) hashes: Vec<HashDigest>,
+    pub(crate) hashes: HashDigests,
     pub(crate) metadata: Option<Metadata>,
     /// The "full" marker for this distribution. It precisely describes all
     /// marker environments for which this distribution _can_ be installed.

@@ -20,7 +20,7 @@ use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::{Version, VersionSpecifier};
 use uv_pep508::{MarkerEnvironment, MarkerTree, MarkerTreeKind};
 use uv_pypi_types::{
-    Conflicts, HashDigest, ParsedUrlError, Requirement, VerbatimParsedUrl, Yanked,
+    Conflicts, HashDigests, ParsedUrlError, Requirement, VerbatimParsedUrl, Yanked,
 };
 
 use crate::graph_ops::{marker_reachability, simplify_conflict_markers};
@@ -414,7 +414,7 @@ impl ResolverOutput {
         preferences: &Preferences,
         in_memory: &InMemoryIndex,
         git: &GitResolver,
-    ) -> Result<(ResolvedDist, Vec<HashDigest>, Option<Metadata>), ResolveError> {
+    ) -> Result<(ResolvedDist, HashDigests, Option<Metadata>), ResolveError> {
         Ok(if let Some(url) = url {
             // Create the distribution.
             let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?;
@@ -520,11 +520,11 @@ impl ResolverOutput {
         version: &Version,
         preferences: &Preferences,
         in_memory: &InMemoryIndex,
-    ) -> Vec<HashDigest> {
+    ) -> HashDigests {
         // 1. Look for hashes from the lockfile.
         if let Some(digests) = preferences.match_hashes(name, version) {
             if !digests.is_empty() {
-                return digests.to_vec();
+                return HashDigests::from(digests);
             }
         }
 
@@ -552,7 +552,8 @@ impl ResolverOutput {
         if let Some(digests) = version_maps
             .iter()
             .find_map(|version_map| version_map.hashes(version))
-            .map(|mut digests| {
+            .map(|digests| {
+                let mut digests = HashDigests::from(digests);
                 digests.sort_unstable();
                 digests
             })
@@ -565,7 +566,7 @@ impl ResolverOutput {
             }
         }
 
-        vec![]
+        HashDigests::empty()
     }
 
     /// Returns an iterator over the distinct packages in the graph.

@@ -177,7 +177,7 @@ impl<'dist> RequirementsTxtDist<'dist> {
         Self {
             dist: &annotated.dist,
             version: &annotated.version,
-            hashes: &annotated.hashes,
+            hashes: annotated.hashes.as_slice(),
             // OK because we've asserted above that this dist
             // does not have a non-trivial conflicting marker
             // that we would otherwise need to care about.

@@ -201,12 +201,15 @@ impl VersionMap {
     }
 
     /// Return the [`Hashes`] for the given version, if any.
-    pub(crate) fn hashes(&self, version: &Version) -> Option<Vec<HashDigest>> {
+    pub(crate) fn hashes(&self, version: &Version) -> Option<&[HashDigest]> {
         match self.inner {
-            VersionMapInner::Eager(ref eager) => {
-                eager.map.get(version).map(|file| file.hashes().to_vec())
-            }
-            VersionMapInner::Lazy(ref lazy) => lazy.get(version).map(|file| file.hashes().to_vec()),
+            VersionMapInner::Eager(ref eager) => eager
+                .map
+                .get(version)
+                .map(uv_distribution_types::PrioritizedDist::hashes),
+            VersionMapInner::Lazy(ref lazy) => lazy
+                .get(version)
+                .map(uv_distribution_types::PrioritizedDist::hashes),
         }
     }
 
@@ -422,7 +425,7 @@ impl VersionMapLazy {
                     &filename,
                     &filename.name,
                     &filename.version,
-                    &hashes,
+                    hashes.as_slice(),
                     yanked,
                     excluded,
                     upload_time,
@@ -438,7 +441,7 @@ impl VersionMapLazy {
                 let compatibility = self.source_dist_compatibility(
                     &filename.name,
                     &filename.version,
-                    &hashes,
+                    hashes.as_slice(),
                     yanked,
                     excluded,
                     upload_time,
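The `VersionMap::hashes` hunk just above also changes the accessor's calling convention: it now returns `Option<&[HashDigest]>` instead of an owned `Option<Vec<HashDigest>>`, so lookups no longer copy. A minimal sketch of that borrow-don't-clone pattern, with simplified stand-in types (not uv's real `VersionMap`):

```rust
use std::collections::BTreeMap;

// Stand-in for the eager map inside `VersionMap`; keys and values simplified.
struct Versions {
    map: BTreeMap<String, Box<[u64]>>,
}

impl Versions {
    // Returning a borrowed slice (as the diff now does) makes cloning the
    // caller's decision instead of an unconditional cost per lookup.
    fn hashes(&self, version: &str) -> Option<&[u64]> {
        self.map.get(version).map(|hashes| hashes.as_ref())
    }
}

fn main() {
    let versions = Versions {
        map: BTreeMap::from([("1.0.0".to_string(), vec![1, 2, 3].into_boxed_slice())]),
    };
    // Borrow for inspection...
    let borrowed: &[u64] = versions.hashes("1.0.0").unwrap_or(&[]);
    // ...and convert to an owned collection only where ownership is needed.
    let owned: Vec<u64> = borrowed.to_vec();
    assert_eq!(owned, [1, 2, 3]);
}
```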
@@ -1,6 +1,7 @@
-use rustc_hash::FxHashMap;
 use std::str::FromStr;
 use std::sync::Arc;
 
+use rustc_hash::FxHashMap;
 use url::Url;
 
 use uv_configuration::HashCheckingMode;
@@ -11,7 +12,7 @@ use uv_distribution_types::{
 use uv_normalize::PackageName;
 use uv_pep440::Version;
 use uv_pypi_types::{
-    HashDigest, HashError, Hashes, Requirement, RequirementSource, ResolverMarkerEnvironment,
+    HashDigest, HashDigests, HashError, Requirement, RequirementSource, ResolverMarkerEnvironment,
 };
 
 #[derive(Debug, Default, Clone)]
@@ -157,7 +158,8 @@ impl HashStrategy {
             // it from the fragment.
             requirement
                 .hashes()
-                .map(Hashes::into_digests)
+                .map(HashDigests::from)
+                .map(|hashes| hashes.to_vec())
                 .unwrap_or_default()
         } else {
             // Parse the hashes.
@@ -210,7 +212,8 @@ impl HashStrategy {
             // it from the fragment.
             requirement
                 .hashes()
-                .map(Hashes::into_digests)
+                .map(HashDigests::from)
+                .map(|hashes| hashes.to_vec())
                 .unwrap_or_default()
         } else {
             // Parse the hashes.