Add support for URL requirements in --generate-hashes (#2952)

## Summary

This PR enables hash generation for URL requirements when the user passes
`--generate-hashes` to `pip compile`. While we already include hashes from the
registry, today we omit hashes for URL requirements.

To power hash generation, we introduce a `HashPolicy` abstraction:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HashPolicy<'a> {
    /// No hash policy is specified.
    None,
    /// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
    Generate,
    /// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
    /// be generated so as to ensure that the archive is valid.
    Validate(&'a [HashDigest]),
}
```

All of the methods on the distribution database now accept this policy rather
than `&'a [HashDigest]`.
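
To make the change concrete, here is a rough sketch of how a caller might invoke the new API (hypothetical call sites with assumed bindings, not lifted from the diff): a registry requirement with pinned hashes validates against its known digests, while a URL requirement under `--generate-hashes` asks for a SHA-256 digest to be computed.

```rust
// Hypothetical call sites, for illustration only; `database`, `dist`, and
// `digests` are assumed to be in scope.

// Registry requirement with pinned hashes: validate against the known digests.
let registry_metadata = database
    .get_or_build_wheel_metadata(&dist, HashPolicy::Validate(&digests))
    .await?;

// URL requirement under `--generate-hashes`: compute a SHA-256 digest instead.
let url_metadata = database
    .get_or_build_wheel_metadata(&dist, HashPolicy::Generate)
    .await?;
```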

Closes #2378.
Charlie Marsh 2024-04-10 16:02:45 -04:00 committed by GitHub
parent 8513d603b4
commit 006379c50c
31 changed files with 546 additions and 495 deletions


@ -8,7 +8,7 @@ use pypi_types::HashDigest;
use uv_normalize::PackageName; use uv_normalize::PackageName;
use crate::direct_url::{DirectUrl, LocalFileUrl}; use crate::direct_url::{DirectUrl, LocalFileUrl};
use crate::hashed::Hashed; use crate::hash::Hashed;
use crate::{ use crate::{
BuiltDist, Dist, DistributionMetadata, InstalledMetadata, InstalledVersion, Name, SourceDist, BuiltDist, Dist, DistributionMetadata, InstalledMetadata, InstalledVersion, Name, SourceDist,
VersionOrUrl, VersionOrUrl,


@ -0,0 +1,84 @@
use pypi_types::{HashAlgorithm, HashDigest};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HashPolicy<'a> {
/// No hash policy is specified.
None,
/// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
Generate,
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
/// be generated so as to ensure that the archive is valid.
Validate(&'a [HashDigest]),
}
impl<'a> HashPolicy<'a> {
/// Returns `true` if the hash policy is `None`.
pub fn is_none(&self) -> bool {
matches!(self, Self::None)
}
/// Returns `true` if the hash policy is `Generate`.
pub fn is_generate(&self) -> bool {
matches!(self, Self::Generate)
}
/// Returns `true` if the hash policy is `Validate`.
pub fn is_validate(&self) -> bool {
matches!(self, Self::Validate(_))
}
/// Return the algorithms used in the hash policy.
pub fn algorithms(&self) -> Vec<HashAlgorithm> {
match self {
Self::None => vec![],
Self::Generate => vec![HashAlgorithm::Sha256],
Self::Validate(hashes) => {
let mut algorithms = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
algorithms.sort();
algorithms.dedup();
algorithms
}
}
}
/// Return the digests used in the hash policy.
pub fn digests(&self) -> &[HashDigest] {
match self {
Self::None => &[],
Self::Generate => &[],
Self::Validate(hashes) => hashes,
}
}
}
pub trait Hashed {
/// Return the [`HashDigest`]s for the archive.
fn hashes(&self) -> &[HashDigest];
/// Returns `true` if the archive satisfies the given hash policy.
fn satisfies(&self, hashes: HashPolicy) -> bool {
match hashes {
HashPolicy::None => true,
HashPolicy::Generate => self
.hashes()
.iter()
.any(|hash| hash.algorithm == HashAlgorithm::Sha256),
HashPolicy::Validate(hashes) => self.hashes().iter().any(|hash| hashes.contains(hash)),
}
}
/// Returns `true` if the archive includes a hash for at least one of the given algorithms.
fn has_digests(&self, hashes: HashPolicy) -> bool {
match hashes {
HashPolicy::None => true,
HashPolicy::Generate => self
.hashes()
.iter()
.any(|hash| hash.algorithm == HashAlgorithm::Sha256),
HashPolicy::Validate(hashes) => hashes
.iter()
.map(HashDigest::algorithm)
.any(|algorithm| self.hashes().iter().any(|hash| hash.algorithm == algorithm)),
}
}
}


@ -1,27 +0,0 @@
use pypi_types::HashDigest;
pub trait Hashed {
/// Return the [`HashDigest`]s for the archive.
fn hashes(&self) -> &[HashDigest];
/// Returns `true` if the archive satisfies the given hashes.
fn satisfies(&self, hashes: &[HashDigest]) -> bool {
if hashes.is_empty() {
true
} else {
self.hashes().iter().any(|hash| hashes.contains(hash))
}
}
/// Returns `true` if the archive includes a hash for at least one of the given algorithms.
fn has_digests(&self, hashes: &[HashDigest]) -> bool {
if hashes.is_empty() {
true
} else {
hashes
.iter()
.map(HashDigest::algorithm)
.any(|algorithm| self.hashes().iter().any(|hash| hash.algorithm == algorithm))
}
}
}


@ -51,7 +51,7 @@ pub use crate::direct_url::*;
pub use crate::editable::*; pub use crate::editable::*;
pub use crate::error::*; pub use crate::error::*;
pub use crate::file::*; pub use crate::file::*;
pub use crate::hashed::*; pub use crate::hash::*;
pub use crate::id::*; pub use crate::id::*;
pub use crate::index_url::*; pub use crate::index_url::*;
pub use crate::installed::*; pub use crate::installed::*;
@ -67,7 +67,7 @@ mod direct_url;
mod editable; mod editable;
mod error; mod error;
mod file; mod file;
mod hashed; mod hash;
mod id; mod id;
mod index_url; mod index_url;
mod installed; mod installed;


@ -18,7 +18,7 @@ use uv_dispatch::BuildDispatch;
use uv_installer::SitePackages; use uv_installer::SitePackages;
use uv_interpreter::PythonEnvironment; use uv_interpreter::PythonEnvironment;
use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver}; use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver};
use uv_types::{BuildIsolation, InFlight, RequiredHashes}; use uv_types::{BuildIsolation, HashStrategy, InFlight};
#[derive(ValueEnum, Default, Clone)] #[derive(ValueEnum, Default, Clone)]
pub(crate) enum ResolveCliFormat { pub(crate) enum ResolveCliFormat {
@ -58,7 +58,6 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
let index_locations = let index_locations =
IndexLocations::new(args.index_url, args.extra_index_url, args.find_links, false); IndexLocations::new(args.index_url, args.extra_index_url, args.find_links, false);
let index = InMemoryIndex::default(); let index = InMemoryIndex::default();
let hashes = RequiredHashes::default();
let in_flight = InFlight::default(); let in_flight = InFlight::default();
let no_build = if args.no_build { let no_build = if args.no_build {
NoBuild::All NoBuild::All
@ -74,7 +73,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
FlatIndex::from_entries( FlatIndex::from_entries(
entries, entries,
venv.interpreter().tags()?, venv.interpreter().tags()?,
&RequiredHashes::default(), &HashStrategy::None,
&no_build, &no_build,
&NoBinary::None, &NoBinary::None,
) )
@ -109,7 +108,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
&client, &client,
&flat_index, &flat_index,
&index, &index,
&hashes, &HashStrategy::None,
&build_dispatch, &build_dispatch,
&site_packages, &site_packages,
)?; )?;


@ -21,7 +21,7 @@ use uv_configuration::{BuildKind, ConfigSettings, NoBinary, NoBuild, Reinstall,
use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages}; use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages};
use uv_interpreter::{Interpreter, PythonEnvironment}; use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver}; use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver};
use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes}; use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
/// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`] /// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`]
/// documentation. /// documentation.
@ -134,7 +134,6 @@ impl<'a> BuildContext for BuildDispatch<'a> {
async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result<Resolution> { async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result<Resolution> {
let markers = self.interpreter.markers(); let markers = self.interpreter.markers();
let tags = self.interpreter.tags()?; let tags = self.interpreter.tags()?;
let hashes = RequiredHashes::default();
let resolver = Resolver::new( let resolver = Resolver::new(
Manifest::simple(requirements.to_vec()), Manifest::simple(requirements.to_vec()),
self.options, self.options,
@ -144,7 +143,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self.client, self.client,
self.flat_index, self.flat_index,
self.index, self.index,
&hashes, &HashStrategy::None,
self, self,
&EmptyInstalledPackages, &EmptyInstalledPackages,
)?; )?;
@ -178,9 +177,6 @@ impl<'a> BuildContext for BuildDispatch<'a> {
venv.root().display(), venv.root().display(),
); );
// Don't enforce hashes for build dependencies.
let hashes = RequiredHashes::default();
// Determine the current environment markers. // Determine the current environment markers.
let tags = self.interpreter.tags()?; let tags = self.interpreter.tags()?;
@ -197,7 +193,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
site_packages, site_packages,
&Reinstall::None, &Reinstall::None,
&NoBinary::None, &NoBinary::None,
&RequiredHashes::default(), &HashStrategy::None,
self.index_locations, self.index_locations,
self.cache(), self.cache(),
venv, venv,
@ -226,7 +222,8 @@ impl<'a> BuildContext for BuildDispatch<'a> {
vec![] vec![]
} else { } else {
// TODO(konstin): Check that there is no endless recursion. // TODO(konstin): Check that there is no endless recursion.
let downloader = Downloader::new(self.cache, tags, &hashes, self.client, self); let downloader =
Downloader::new(self.cache, tags, &HashStrategy::None, self.client, self);
debug!( debug!(
"Downloading and building requirement{} for build: {}", "Downloading and building requirement{} for build: {}",
if remote.len() == 1 { "" } else { "s" }, if remote.len() == 1 { "" } else { "s" },


@ -11,8 +11,8 @@ use url::Url;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use distribution_types::{ use distribution_types::{
BuildableSource, BuiltDist, Dist, FileLocation, Hashed, IndexLocations, LocalEditable, Name, BuildableSource, BuiltDist, Dist, FileLocation, HashPolicy, Hashed, IndexLocations,
SourceDist, LocalEditable, Name, SourceDist,
}; };
use platform_tags::Tags; use platform_tags::Tags;
use pypi_types::{HashDigest, Metadata23}; use pypi_types::{HashDigest, Metadata23};
@ -91,7 +91,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
&self, &self,
dist: &Dist, dist: &Dist,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<LocalWheel, Error> { ) -> Result<LocalWheel, Error> {
match dist { match dist {
Dist::Built(built) => self.get_wheel(built, hashes).await, Dist::Built(built) => self.get_wheel(built, hashes).await,
@ -108,7 +108,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
pub async fn get_or_build_wheel_metadata( pub async fn get_or_build_wheel_metadata(
&self, &self,
dist: &Dist, dist: &Dist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
match dist { match dist {
Dist::Built(built) => self.get_wheel_metadata(built, hashes).await, Dist::Built(built) => self.get_wheel_metadata(built, hashes).await,
@ -147,12 +147,12 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
/// Fetch a wheel from the cache or download it from the index. /// Fetch a wheel from the cache or download it from the index.
/// ///
/// While hashes will be generated in some cases, hash-checking is _not_ enforced and should /// While hashes will be generated in all cases, hash-checking is _not_ enforced and should
/// instead be enforced by the caller. /// instead be enforced by the caller.
async fn get_wheel( async fn get_wheel(
&self, &self,
dist: &BuiltDist, dist: &BuiltDist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<LocalWheel, Error> { ) -> Result<LocalWheel, Error> {
let no_binary = match self.build_context.no_binary() { let no_binary = match self.build_context.no_binary() {
NoBinary::None => false, NoBinary::None => false,
@ -298,7 +298,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
&self, &self,
dist: &SourceDist, dist: &SourceDist,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<LocalWheel, Error> { ) -> Result<LocalWheel, Error> {
let lock = self.locks.acquire(&Dist::Source(dist.clone())).await; let lock = self.locks.acquire(&Dist::Source(dist.clone())).await;
let _guard = lock.lock().await; let _guard = lock.lock().await;
@ -342,8 +342,21 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
pub async fn get_wheel_metadata( pub async fn get_wheel_metadata(
&self, &self,
dist: &BuiltDist, dist: &BuiltDist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
// If hash generation is enabled, and the distribution isn't hosted on an index, get the
// entire wheel to ensure that the hashes are included in the response. If the distribution
// is hosted on an index, the hashes will be included in the simple metadata response.
// For hash _validation_, callers are expected to enforce the policy when retrieving the
// wheel.
// TODO(charlie): Request the hashes via a separate method, to reduce the coupling in this API.
if hashes.is_generate() && matches!(dist, BuiltDist::DirectUrl(_) | BuiltDist::Path(_)) {
let wheel = self.get_wheel(dist, hashes).await?;
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
return Ok(ArchiveMetadata { metadata, hashes });
}
match self.client.wheel_metadata(dist).boxed().await { match self.client.wheel_metadata(dist).boxed().await {
Ok(metadata) => Ok(ArchiveMetadata::from(metadata)), Ok(metadata) => Ok(ArchiveMetadata::from(metadata)),
Err(err) if err.is_http_streaming_unsupported() => { Err(err) if err.is_http_streaming_unsupported() => {
@ -367,7 +380,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
pub async fn build_wheel_metadata( pub async fn build_wheel_metadata(
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
let no_build = match self.build_context.no_build() { let no_build = match self.build_context.no_build() {
NoBuild::All => true, NoBuild::All => true,
@ -400,7 +413,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
filename: &WheelFilename, filename: &WheelFilename,
wheel_entry: &CacheEntry, wheel_entry: &CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Archive, Error> { ) -> Result<Archive, Error> {
// Create an entry for the HTTP cache. // Create an entry for the HTTP cache.
let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem())); let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem()));
@ -413,12 +426,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
.into_async_read(); .into_async_read();
// Create a hasher for each hash algorithm. // Create a hasher for each hash algorithm.
let algorithms = { let algorithms = hashes.algorithms();
let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
hash.sort();
hash.dedup();
hash
};
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers);
@ -428,7 +436,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?; uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
// If necessary, exhaust the reader to compute the hash. // If necessary, exhaust the reader to compute the hash.
if !hashes.is_empty() { if !hashes.is_none() {
hasher.finish().await.map_err(Error::HashExhaustion)?; hasher.finish().await.map_err(Error::HashExhaustion)?;
} }
@ -492,7 +500,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
filename: &WheelFilename, filename: &WheelFilename,
wheel_entry: &CacheEntry, wheel_entry: &CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Archive, Error> { ) -> Result<Archive, Error> {
// Create an entry for the HTTP cache. // Create an entry for the HTTP cache.
let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem())); let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem()));
@ -521,7 +529,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
.map_err(Error::CacheWrite)?; .map_err(Error::CacheWrite)?;
// If no hashes are required, parallelize the unzip operation. // If no hashes are required, parallelize the unzip operation.
let hashes = if hashes.is_empty() { let hashes = if hashes.is_none() {
let file = file.into_std().await; let file = file.into_std().await;
tokio::task::spawn_blocking({ tokio::task::spawn_blocking({
let target = temp_dir.path().to_owned(); let target = temp_dir.path().to_owned();
@ -536,12 +544,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
vec![] vec![]
} else { } else {
// Create a hasher for each hash algorithm. // Create a hasher for each hash algorithm.
let algorithms = { let algorithms = hashes.algorithms();
let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
hash.sort();
hash.dedup();
hash
};
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?; uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
@ -609,7 +612,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
filename: &WheelFilename, filename: &WheelFilename,
wheel_entry: CacheEntry, wheel_entry: CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<LocalWheel, Error> { ) -> Result<LocalWheel, Error> {
// Determine the last-modified time of the wheel. // Determine the last-modified time of the wheel.
let modified = ArchiveTimestamp::from_file(path).map_err(Error::CacheRead)?; let modified = ArchiveTimestamp::from_file(path).map_err(Error::CacheRead)?;
@ -626,7 +629,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
hashes: archive.hashes, hashes: archive.hashes,
filename: filename.clone(), filename: filename.clone(),
}) })
} else if hashes.is_empty() { } else if hashes.is_none() {
// Otherwise, unzip the wheel. // Otherwise, unzip the wheel.
let archive = Archive::new(self.unzip_wheel(path, wheel_entry.path()).await?, vec![]); let archive = Archive::new(self.unzip_wheel(path, wheel_entry.path()).await?, vec![]);
write_timestamped_archive(&archive_entry, archive.clone(), modified).await?; write_timestamped_archive(&archive_entry, archive.clone(), modified).await?;
@ -646,12 +649,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
.map_err(Error::CacheWrite)?; .map_err(Error::CacheWrite)?;
// Create a hasher for each hash algorithm. // Create a hasher for each hash algorithm.
let algorithms = { let algorithms = hashes.algorithms();
let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
hash.sort();
hash.dedup();
hash
};
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);


@ -4,7 +4,7 @@ use distribution_types::{
use platform_tags::Tags; use platform_tags::Tags;
use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheShard, WheelCache}; use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheShard, WheelCache};
use uv_fs::symlinks; use uv_fs::symlinks;
use uv_types::RequiredHashes; use uv_types::HashStrategy;
use crate::index::cached_wheel::CachedWheel; use crate::index::cached_wheel::CachedWheel;
use crate::source::{read_http_revision, read_timestamped_revision, REVISION}; use crate::source::{read_http_revision, read_timestamped_revision, REVISION};
@ -15,16 +15,16 @@ use crate::Error;
pub struct BuiltWheelIndex<'a> { pub struct BuiltWheelIndex<'a> {
cache: &'a Cache, cache: &'a Cache,
tags: &'a Tags, tags: &'a Tags,
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
} }
impl<'a> BuiltWheelIndex<'a> { impl<'a> BuiltWheelIndex<'a> {
/// Initialize an index of built distributions. /// Initialize an index of built distributions.
pub fn new(cache: &'a Cache, tags: &'a Tags, hashes: &'a RequiredHashes) -> Self { pub fn new(cache: &'a Cache, tags: &'a Tags, hasher: &'a HashStrategy) -> Self {
Self { Self {
cache, cache,
tags, tags,
hashes, hasher,
} }
} }
@ -46,10 +46,8 @@ impl<'a> BuiltWheelIndex<'a> {
}; };
// Enforce hash-checking by omitting any wheels that don't satisfy the required hashes. // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
if let Some(hashes) = self.hashes.get(&source_dist.name) { if !revision.satisfies(self.hasher.get(&source_dist.name)) {
if !revision.satisfies(hashes) { return Ok(None);
return Ok(None);
}
} }
Ok(self.find(&cache_shard.shard(revision.id()))) Ok(self.find(&cache_shard.shard(revision.id())))
@ -76,10 +74,8 @@ impl<'a> BuiltWheelIndex<'a> {
}; };
// Enforce hash-checking by omitting any wheels that don't satisfy the required hashes. // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
if let Some(hashes) = self.hashes.get(&source_dist.name) { if !revision.satisfies(self.hasher.get(&source_dist.name)) {
if !revision.satisfies(hashes) { return Ok(None);
return Ok(None);
}
} }
Ok(self.find(&cache_shard.shard(revision.id()))) Ok(self.find(&cache_shard.shard(revision.id())))
@ -88,7 +84,7 @@ impl<'a> BuiltWheelIndex<'a> {
/// Return the most compatible [`CachedWheel`] for a given source distribution at a git URL. /// Return the most compatible [`CachedWheel`] for a given source distribution at a git URL.
pub fn git(&self, source_dist: &GitSourceDist) -> Option<CachedWheel> { pub fn git(&self, source_dist: &GitSourceDist) -> Option<CachedWheel> {
// Enforce hash-checking, which isn't supported for Git distributions. // Enforce hash-checking, which isn't supported for Git distributions.
if self.hashes.get(&source_dist.name).is_some() { if self.hasher.get(&source_dist.name).is_validate() {
return None; return None;
} }


@ -10,7 +10,7 @@ use platform_tags::Tags;
use uv_cache::{Cache, CacheBucket, WheelCache}; use uv_cache::{Cache, CacheBucket, WheelCache};
use uv_fs::{directories, files, symlinks}; use uv_fs::{directories, files, symlinks};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_types::RequiredHashes; use uv_types::HashStrategy;
use crate::index::cached_wheel::CachedWheel; use crate::index::cached_wheel::CachedWheel;
use crate::source::{read_http_revision, REVISION}; use crate::source::{read_http_revision, REVISION};
@ -21,7 +21,7 @@ pub struct RegistryWheelIndex<'a> {
cache: &'a Cache, cache: &'a Cache,
tags: &'a Tags, tags: &'a Tags,
index_locations: &'a IndexLocations, index_locations: &'a IndexLocations,
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
index: FxHashMap<&'a PackageName, BTreeMap<Version, CachedRegistryDist>>, index: FxHashMap<&'a PackageName, BTreeMap<Version, CachedRegistryDist>>,
} }
@ -31,13 +31,13 @@ impl<'a> RegistryWheelIndex<'a> {
cache: &'a Cache, cache: &'a Cache,
tags: &'a Tags, tags: &'a Tags,
index_locations: &'a IndexLocations, index_locations: &'a IndexLocations,
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
) -> Self { ) -> Self {
Self { Self {
cache, cache,
tags, tags,
index_locations, index_locations,
hashes, hasher,
index: FxHashMap::default(), index: FxHashMap::default(),
} }
} }
@ -72,7 +72,7 @@ impl<'a> RegistryWheelIndex<'a> {
self.cache, self.cache,
self.tags, self.tags,
self.index_locations, self.index_locations,
self.hashes, self.hasher,
)), )),
}; };
versions versions
@ -84,10 +84,9 @@ impl<'a> RegistryWheelIndex<'a> {
cache: &Cache, cache: &Cache,
tags: &Tags, tags: &Tags,
index_locations: &IndexLocations, index_locations: &IndexLocations,
hashes: &RequiredHashes, hasher: &HashStrategy,
) -> BTreeMap<Version, CachedRegistryDist> { ) -> BTreeMap<Version, CachedRegistryDist> {
let mut versions = BTreeMap::new(); let mut versions = BTreeMap::new();
let hashes = hashes.get(package).unwrap_or_default();
// Collect into owned `IndexUrl` // Collect into owned `IndexUrl`
let flat_index_urls: Vec<IndexUrl> = index_locations let flat_index_urls: Vec<IndexUrl> = index_locations
@ -119,7 +118,7 @@ impl<'a> RegistryWheelIndex<'a> {
{ {
if let Some(wheel) = CachedWheel::from_http_pointer(&wheel_dir.join(&file)) { if let Some(wheel) = CachedWheel::from_http_pointer(&wheel_dir.join(&file)) {
// Enforce hash-checking based on the built distribution. // Enforce hash-checking based on the built distribution.
if wheel.satisfies(hashes) { if wheel.satisfies(hasher.get(package)) {
Self::add_wheel(wheel, tags, &mut versions); Self::add_wheel(wheel, tags, &mut versions);
} }
} }
@ -132,7 +131,7 @@ impl<'a> RegistryWheelIndex<'a> {
if let Some(wheel) = CachedWheel::from_revision_pointer(&wheel_dir.join(&file)) if let Some(wheel) = CachedWheel::from_revision_pointer(&wheel_dir.join(&file))
{ {
// Enforce hash-checking based on the built distribution. // Enforce hash-checking based on the built distribution.
if wheel.satisfies(hashes) { if wheel.satisfies(hasher.get(package)) {
Self::add_wheel(wheel, tags, &mut versions); Self::add_wheel(wheel, tags, &mut versions);
} }
} }
@ -153,7 +152,7 @@ impl<'a> RegistryWheelIndex<'a> {
let revision_entry = cache_shard.entry(REVISION); let revision_entry = cache_shard.entry(REVISION);
if let Ok(Some(revision)) = read_http_revision(&revision_entry) { if let Ok(Some(revision)) = read_http_revision(&revision_entry) {
// Enforce hash-checking based on the source distribution. // Enforce hash-checking based on the source distribution.
if revision.satisfies(hashes) { if revision.satisfies(hasher.get(package)) {
for wheel_dir in symlinks(cache_shard.join(revision.id())) { for wheel_dir in symlinks(cache_shard.join(revision.id())) {
if let Some(wheel) = CachedWheel::from_built_source(&wheel_dir) { if let Some(wheel) = CachedWheel::from_built_source(&wheel_dir) {
Self::add_wheel(wheel, tags, &mut versions); Self::add_wheel(wheel, tags, &mut versions);


@ -16,8 +16,8 @@ use zip::ZipArchive;
use distribution_filename::WheelFilename; use distribution_filename::WheelFilename;
use distribution_types::{ use distribution_types::{
BuildableSource, DirectArchiveUrl, Dist, FileLocation, GitSourceUrl, Hashed, LocalEditable, BuildableSource, DirectArchiveUrl, Dist, FileLocation, GitSourceUrl, HashPolicy, Hashed,
PathSourceDist, PathSourceUrl, RemoteSource, SourceDist, SourceUrl, LocalEditable, PathSourceDist, PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
}; };
use install_wheel_rs::metadata::read_archive_metadata; use install_wheel_rs::metadata::read_archive_metadata;
use platform_tags::Tags; use platform_tags::Tags;
@ -79,7 +79,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<BuiltWheelMetadata, Error> { ) -> Result<BuiltWheelMetadata, Error> {
let built_wheel_metadata = match &source { let built_wheel_metadata = match &source {
BuildableSource::Dist(SourceDist::Registry(dist)) => { BuildableSource::Dist(SourceDist::Registry(dist)) => {
@ -214,7 +214,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
pub(super) async fn download_and_build_metadata( pub(super) async fn download_and_build_metadata(
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
let metadata = match &source { let metadata = match &source {
BuildableSource::Dist(SourceDist::Registry(dist)) => { BuildableSource::Dist(SourceDist::Registry(dist)) => {
@ -350,7 +350,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
cache_shard: &CacheShard, cache_shard: &CacheShard,
subdirectory: Option<&'data Path>, subdirectory: Option<&'data Path>,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<BuiltWheelMetadata, Error> { ) -> Result<BuiltWheelMetadata, Error> {
// Fetch the revision for the source distribution. // Fetch the revision for the source distribution.
let revision = self let revision = self
@ -361,7 +361,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if !revision.satisfies(hashes) { if !revision.satisfies(hashes) {
return Err(Error::hash_mismatch( return Err(Error::hash_mismatch(
source.to_string(), source.to_string(),
hashes, hashes.digests(),
revision.hashes(), revision.hashes(),
)); ));
} }
@ -418,7 +418,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
url: &'data Url, url: &'data Url,
cache_shard: &CacheShard, cache_shard: &CacheShard,
subdirectory: Option<&'data Path>, subdirectory: Option<&'data Path>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
// Fetch the revision for the source distribution. // Fetch the revision for the source distribution.
let revision = self let revision = self
@ -429,7 +429,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if !revision.satisfies(hashes) { if !revision.satisfies(hashes) {
return Err(Error::hash_mismatch( return Err(Error::hash_mismatch(
source.to_string(), source.to_string(),
hashes, hashes.digests(),
revision.hashes(), revision.hashes(),
)); ));
} }
@ -507,7 +507,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
filename: &str, filename: &str,
url: &Url, url: &Url,
cache_shard: &CacheShard, cache_shard: &CacheShard,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Revision, Error> { ) -> Result<Revision, Error> {
let cache_entry = cache_shard.entry(REVISION); let cache_entry = cache_shard.entry(REVISION);
let cache_control = match self.client.connectivity() { let cache_control = match self.client.connectivity() {
@ -570,7 +570,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>, resource: &PathSourceUrl<'_>,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<BuiltWheelMetadata, Error> { ) -> Result<BuiltWheelMetadata, Error> {
let cache_shard = self.build_context.cache().shard( let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels, CacheBucket::BuiltWheels,
@ -586,7 +586,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if !revision.satisfies(hashes) { if !revision.satisfies(hashes) {
return Err(Error::hash_mismatch( return Err(Error::hash_mismatch(
source.to_string(), source.to_string(),
hashes, hashes.digests(),
revision.hashes(), revision.hashes(),
)); ));
} }
@ -640,7 +640,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>, resource: &PathSourceUrl<'_>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
let cache_shard = self.build_context.cache().shard( let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels, CacheBucket::BuiltWheels,
@ -656,7 +656,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if !revision.satisfies(hashes) { if !revision.satisfies(hashes) {
return Err(Error::hash_mismatch( return Err(Error::hash_mismatch(
source.to_string(), source.to_string(),
hashes, hashes.digests(),
revision.hashes(), revision.hashes(),
)); ));
} }
@ -732,7 +732,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>, resource: &PathSourceUrl<'_>,
cache_shard: &CacheShard, cache_shard: &CacheShard,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Revision, Error> { ) -> Result<Revision, Error> {
// Determine the last-modified time of the source distribution. // Determine the last-modified time of the source distribution.
let modified = ArchiveTimestamp::from_file(&resource.path).map_err(Error::CacheRead)?; let modified = ArchiveTimestamp::from_file(&resource.path).map_err(Error::CacheRead)?;
@ -779,10 +779,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>, resource: &PathSourceUrl<'_>,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<BuiltWheelMetadata, Error> { ) -> Result<BuiltWheelMetadata, Error> {
// Before running the build, check that the hashes match. // Before running the build, check that the hashes match.
if !hashes.is_empty() { if hashes.is_validate() {
return Err(Error::HashesNotSupportedSourceTree(source.to_string())); return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
} }
@ -843,10 +843,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &PathSourceUrl<'_>, resource: &PathSourceUrl<'_>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
// Before running the build, check that the hashes match. // Before running the build, check that the hashes match.
if !hashes.is_empty() { if hashes.is_validate() {
return Err(Error::HashesNotSupportedSourceTree(source.to_string())); return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
} }
@ -945,10 +945,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &GitSourceUrl<'_>, resource: &GitSourceUrl<'_>,
tags: &Tags, tags: &Tags,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<BuiltWheelMetadata, Error> { ) -> Result<BuiltWheelMetadata, Error> {
// Before running the build, check that the hashes match. // Before running the build, check that the hashes match.
if !hashes.is_empty() { if hashes.is_validate() {
return Err(Error::HashesNotSupportedGit(source.to_string())); return Err(Error::HashesNotSupportedGit(source.to_string()));
} }
@ -1017,10 +1017,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self, &self,
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
resource: &GitSourceUrl<'_>, resource: &GitSourceUrl<'_>,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<ArchiveMetadata, Error> { ) -> Result<ArchiveMetadata, Error> {
// Before running the build, check that the hashes match. // Before running the build, check that the hashes match.
if !hashes.is_empty() { if hashes.is_validate() {
return Err(Error::HashesNotSupportedGit(source.to_string())); return Err(Error::HashesNotSupportedGit(source.to_string()));
} }
@ -1111,7 +1111,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>, source: &BuildableSource<'_>,
filename: &str, filename: &str,
target: &Path, target: &Path,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Vec<HashDigest>, Error> { ) -> Result<Vec<HashDigest>, Error> {
let temp_dir = let temp_dir =
tempfile::tempdir_in(self.build_context.cache().bucket(CacheBucket::BuiltWheels)) tempfile::tempdir_in(self.build_context.cache().bucket(CacheBucket::BuiltWheels))
@ -1122,12 +1122,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.into_async_read(); .into_async_read();
// Create a hasher for each hash algorithm. // Create a hasher for each hash algorithm.
let algorithms = { let algorithms = hashes.algorithms();
let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
hash.sort();
hash.dedup();
hash
};
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers);
@ -1137,7 +1132,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
drop(span); drop(span);
// If necessary, exhaust the reader to compute the hash. // If necessary, exhaust the reader to compute the hash.
if !hashes.is_empty() { if !hashes.is_none() {
hasher.finish().await.map_err(Error::HashExhaustion)?; hasher.finish().await.map_err(Error::HashExhaustion)?;
} }
@ -1166,7 +1161,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
&self, &self,
path: &Path, path: &Path,
target: &Path, target: &Path,
hashes: &[HashDigest], hashes: HashPolicy<'_>,
) -> Result<Vec<HashDigest>, Error> { ) -> Result<Vec<HashDigest>, Error> {
debug!("Unpacking for build: {}", path.display()); debug!("Unpacking for build: {}", path.display());
@ -1178,12 +1173,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheRead)?; .map_err(Error::CacheRead)?;
// Create a hasher for each hash algorithm. // Create a hasher for each hash algorithm.
let algorithms = { let algorithms = hashes.algorithms();
let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
hash.sort();
hash.dedup();
hash
};
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(reader, &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(reader, &mut hashers);
@ -1191,7 +1181,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
uv_extract::stream::archive(&mut hasher, path, &temp_dir.path()).await?; uv_extract::stream::archive(&mut hasher, path, &temp_dir.path()).await?;
// If necessary, exhaust the reader to compute the hash. // If necessary, exhaust the reader to compute the hash.
if !hashes.is_empty() { if !hashes.is_none() {
hasher.finish().await.map_err(Error::HashExhaustion)?; hasher.finish().await.map_err(Error::HashExhaustion)?;
} }


@ -15,7 +15,7 @@ use platform_tags::Tags;
use uv_cache::Cache; use uv_cache::Cache;
use uv_client::RegistryClient; use uv_client::RegistryClient;
use uv_distribution::{DistributionDatabase, LocalWheel}; use uv_distribution::{DistributionDatabase, LocalWheel};
use uv_types::{BuildContext, InFlight, RequiredHashes}; use uv_types::{BuildContext, HashStrategy, InFlight};
use crate::editable::BuiltEditable; use crate::editable::BuiltEditable;
@ -40,7 +40,7 @@ pub enum Error {
pub struct Downloader<'a, Context: BuildContext + Send + Sync> { pub struct Downloader<'a, Context: BuildContext + Send + Sync> {
tags: &'a Tags, tags: &'a Tags,
cache: &'a Cache, cache: &'a Cache,
hashes: &'a RequiredHashes, hashes: &'a HashStrategy,
database: DistributionDatabase<'a, Context>, database: DistributionDatabase<'a, Context>,
reporter: Option<Arc<dyn Reporter>>, reporter: Option<Arc<dyn Reporter>>,
} }
@ -49,7 +49,7 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
pub fn new( pub fn new(
cache: &'a Cache, cache: &'a Cache,
tags: &'a Tags, tags: &'a Tags,
hashes: &'a RequiredHashes, hashes: &'a HashStrategy,
client: &'a RegistryClient, client: &'a RegistryClient,
build_context: &'a Context, build_context: &'a Context,
) -> Self { ) -> Self {
@ -170,22 +170,22 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
pub async fn get_wheel(&self, dist: Dist, in_flight: &InFlight) -> Result<CachedDist, Error> { pub async fn get_wheel(&self, dist: Dist, in_flight: &InFlight) -> Result<CachedDist, Error> {
let id = dist.distribution_id(); let id = dist.distribution_id();
if in_flight.downloads.register(id.clone()) { if in_flight.downloads.register(id.clone()) {
let hashes = self.hashes.get(dist.name()).unwrap_or_default(); let policy = self.hashes.get(dist.name());
let result = self let result = self
.database .database
.get_or_build_wheel(&dist, self.tags, hashes) .get_or_build_wheel(&dist, self.tags, policy)
.boxed() .boxed()
.map_err(|err| Error::Fetch(dist.clone(), err)) .map_err(|err| Error::Fetch(dist.clone(), err))
.await .await
.and_then(|wheel: LocalWheel| { .and_then(|wheel: LocalWheel| {
if wheel.satisfies(hashes) { if wheel.satisfies(policy) {
Ok(wheel) Ok(wheel)
} else { } else {
Err(Error::Fetch( Err(Error::Fetch(
dist.clone(), dist.clone(),
uv_distribution::Error::hash_mismatch( uv_distribution::Error::hash_mismatch(
dist.to_string(), dist.to_string(),
hashes, policy.digests(),
wheel.hashes(), wheel.hashes(),
), ),
)) ))


@ -19,7 +19,7 @@ use uv_configuration::{NoBinary, Reinstall};
use uv_distribution::{read_timestamped_archive, Archive, BuiltWheelIndex, RegistryWheelIndex}; use uv_distribution::{read_timestamped_archive, Archive, BuiltWheelIndex, RegistryWheelIndex};
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_interpreter::PythonEnvironment; use uv_interpreter::PythonEnvironment;
use uv_types::RequiredHashes; use uv_types::HashStrategy;
use crate::{ResolvedEditable, SitePackages}; use crate::{ResolvedEditable, SitePackages};
@ -66,15 +66,15 @@ impl<'a> Planner<'a> {
mut site_packages: SitePackages<'_>, mut site_packages: SitePackages<'_>,
reinstall: &Reinstall, reinstall: &Reinstall,
no_binary: &NoBinary, no_binary: &NoBinary,
hashes: &RequiredHashes, hasher: &HashStrategy,
index_locations: &IndexLocations, index_locations: &IndexLocations,
cache: &Cache, cache: &Cache,
venv: &PythonEnvironment, venv: &PythonEnvironment,
tags: &Tags, tags: &Tags,
) -> Result<Plan> { ) -> Result<Plan> {
// Index all the already-downloaded wheels in the cache. // Index all the already-downloaded wheels in the cache.
let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations, hashes); let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations, hasher);
let built_index = BuiltWheelIndex::new(cache, tags, hashes); let built_index = BuiltWheelIndex::new(cache, tags, hasher);
let mut cached = vec![]; let mut cached = vec![];
let mut remote = vec![]; let mut remote = vec![];
@ -262,8 +262,7 @@ impl<'a> Planner<'a> {
let archive = rmp_serde::from_slice::<Archive>(&data)?; let archive = rmp_serde::from_slice::<Archive>(&data)?;
// Enforce hash checking. // Enforce hash checking.
let hashes = hashes.get(&requirement.name).unwrap_or_default(); if archive.satisfies(hasher.get(&requirement.name)) {
if archive.satisfies(hashes) {
let cached_dist = CachedDirectUrlDist::from_url( let cached_dist = CachedDirectUrlDist::from_url(
wheel.filename, wheel.filename,
wheel.url, wheel.url,
@ -312,8 +311,7 @@ impl<'a> Planner<'a> {
&cache_entry, &cache_entry,
ArchiveTimestamp::from_file(&wheel.path)?, ArchiveTimestamp::from_file(&wheel.path)?,
)? { )? {
let hashes = hashes.get(&requirement.name).unwrap_or_default(); if archive.satisfies(hasher.get(&requirement.name)) {
if archive.satisfies(hashes) {
let cached_dist = CachedDirectUrlDist::from_url( let cached_dist = CachedDirectUrlDist::from_url(
wheel.filename, wheel.filename,
wheel.url, wheel.url,


@ -12,7 +12,7 @@ use uv_client::RegistryClient;
use uv_configuration::{Constraints, Overrides}; use uv_configuration::{Constraints, Overrides};
use uv_distribution::{DistributionDatabase, Reporter}; use uv_distribution::{DistributionDatabase, Reporter};
use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_resolver::{InMemoryIndex, MetadataResponse};
use uv_types::{BuildContext, RequestedRequirements, RequiredHashes}; use uv_types::{BuildContext, HashStrategy, RequestedRequirements};
/// A resolver for resolving lookahead requirements from direct URLs. /// A resolver for resolving lookahead requirements from direct URLs.
/// ///
@ -40,7 +40,7 @@ pub struct LookaheadResolver<'a, Context: BuildContext + Send + Sync> {
/// The editable requirements for the project. /// The editable requirements for the project.
editables: &'a [(LocalEditable, Metadata23)], editables: &'a [(LocalEditable, Metadata23)],
/// The required hashes for the project. /// The required hashes for the project.
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
/// The in-memory index for resolving dependencies. /// The in-memory index for resolving dependencies.
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
/// The database for fetching and building distributions. /// The database for fetching and building distributions.
@ -55,7 +55,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
constraints: &'a Constraints, constraints: &'a Constraints,
overrides: &'a Overrides, overrides: &'a Overrides,
editables: &'a [(LocalEditable, Metadata23)], editables: &'a [(LocalEditable, Metadata23)],
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
context: &'a Context, context: &'a Context,
client: &'a RegistryClient, client: &'a RegistryClient,
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
@ -65,7 +65,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
constraints, constraints,
overrides, overrides,
editables, editables,
hashes, hasher,
index, index,
database: DistributionDatabase::new(client, context), database: DistributionDatabase::new(client, context),
} }
@ -155,10 +155,9 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
archive.metadata.requires_dist.clone() archive.metadata.requires_dist.clone()
} else { } else {
// Run the PEP 517 build process to extract metadata from the source distribution. // Run the PEP 517 build process to extract metadata from the source distribution.
let hashes = self.hashes.get(dist.name()).unwrap_or_default();
let archive = self let archive = self
.database .database
.get_or_build_wheel_metadata(&dist, hashes) .get_or_build_wheel_metadata(&dist, self.hasher.get(dist.name()))
.await .await
.with_context(|| match &dist { .with_context(|| match &dist {
Dist::Built(built) => format!("Failed to download: {built}"), Dist::Built(built) => format!("Failed to download: {built}"),


@ -5,13 +5,13 @@ use anyhow::{Context, Result};
use futures::{StreamExt, TryStreamExt}; use futures::{StreamExt, TryStreamExt};
use url::Url; use url::Url;
use distribution_types::{BuildableSource, PackageId, PathSourceUrl, SourceUrl}; use distribution_types::{BuildableSource, HashPolicy, PackageId, PathSourceUrl, SourceUrl};
use pep508_rs::Requirement; use pep508_rs::Requirement;
use uv_client::RegistryClient; use uv_client::RegistryClient;
use uv_distribution::{DistributionDatabase, Reporter}; use uv_distribution::{DistributionDatabase, Reporter};
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_resolver::{InMemoryIndex, MetadataResponse};
use uv_types::BuildContext; use uv_types::{BuildContext, HashStrategy};
use crate::ExtrasSpecification; use crate::ExtrasSpecification;
@ -24,8 +24,8 @@ pub struct SourceTreeResolver<'a, Context: BuildContext + Send + Sync> {
source_trees: Vec<PathBuf>, source_trees: Vec<PathBuf>,
/// The extras to include when resolving requirements. /// The extras to include when resolving requirements.
extras: &'a ExtrasSpecification<'a>, extras: &'a ExtrasSpecification<'a>,
/// Whether to require hashes for all dependencies. /// The hash policy to enforce.
require_hashes: bool, hasher: &'a HashStrategy,
/// The in-memory index for resolving dependencies. /// The in-memory index for resolving dependencies.
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
/// The database for fetching and building distributions. /// The database for fetching and building distributions.
@ -37,7 +37,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
pub fn new( pub fn new(
source_trees: Vec<PathBuf>, source_trees: Vec<PathBuf>,
extras: &'a ExtrasSpecification<'a>, extras: &'a ExtrasSpecification<'a>,
require_hashes: bool, hasher: &'a HashStrategy,
context: &'a Context, context: &'a Context,
client: &'a RegistryClient, client: &'a RegistryClient,
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
@ -45,7 +45,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
Self { Self {
source_trees, source_trees,
extras, extras,
require_hashes, hasher,
index, index,
database: DistributionDatabase::new(client, context), database: DistributionDatabase::new(client, context),
} }
@ -87,15 +87,19 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
path: Cow::Owned(path), path: Cow::Owned(path),
}); });
// TODO(charlie): Should we enforce this earlier? If the metadata can be extracted // Determine the hash policy. Since we don't have a package name, we perform a
// statically, it won't go through this resolver. But we'll fail anyway, since the // manual match.
// dependencies (when extracted from a `pyproject.toml` or `setup.py`) won't include hashes. let hashes = match self.hasher {
if self.require_hashes { HashStrategy::None => HashPolicy::None,
return Err(anyhow::anyhow!( HashStrategy::Generate => HashPolicy::Generate,
"Hash-checking is not supported for local directories: {}", HashStrategy::Validate(_) => {
source_tree.user_display() // TODO(charlie): Support `--require-hashes` for unnamed requirements.
)); return Err(anyhow::anyhow!(
} "Hash-checking is not supported for local directories: {}",
source_tree.user_display()
));
}
};
// Fetch the metadata for the distribution. // Fetch the metadata for the distribution.
let metadata = { let metadata = {
@ -117,7 +121,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
} else { } else {
// Run the PEP 517 build process to extract metadata from the source distribution. // Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source); let source = BuildableSource::Url(source);
let archive = self.database.build_wheel_metadata(&source, &[]).await?; let archive = self.database.build_wheel_metadata(&source, hashes).await?;
// Insert the metadata into the index. // Insert the metadata into the index.
self.index self.index


@ -10,8 +10,8 @@ use tracing::debug;
use distribution_filename::{SourceDistFilename, WheelFilename}; use distribution_filename::{SourceDistFilename, WheelFilename};
use distribution_types::{ use distribution_types::{
BuildableSource, DirectSourceUrl, GitSourceUrl, PackageId, PathSourceUrl, RemoteSource, BuildableSource, DirectSourceUrl, GitSourceUrl, HashPolicy, PackageId, PathSourceUrl,
SourceUrl, RemoteSource, SourceUrl,
}; };
use pep508_rs::{ use pep508_rs::{
Requirement, RequirementsTxtRequirement, Scheme, UnnamedRequirement, VersionOrUrl, Requirement, RequirementsTxtRequirement, Scheme, UnnamedRequirement, VersionOrUrl,
@ -21,14 +21,14 @@ use uv_client::RegistryClient;
use uv_distribution::{DistributionDatabase, Reporter}; use uv_distribution::{DistributionDatabase, Reporter};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_resolver::{InMemoryIndex, MetadataResponse};
use uv_types::BuildContext; use uv_types::{BuildContext, HashStrategy};
/// Like [`RequirementsSpecification`], but with concrete names for all requirements. /// Like [`RequirementsSpecification`], but with concrete names for all requirements.
pub struct NamedRequirementsResolver<'a, Context: BuildContext + Send + Sync> { pub struct NamedRequirementsResolver<'a, Context: BuildContext + Send + Sync> {
/// The requirements for the project. /// The requirements for the project.
requirements: Vec<RequirementsTxtRequirement>, requirements: Vec<RequirementsTxtRequirement>,
/// Whether to check hashes for distributions. /// Whether to check hashes for distributions.
require_hashes: bool, hasher: &'a HashStrategy,
/// The in-memory index for resolving dependencies. /// The in-memory index for resolving dependencies.
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
/// The database for fetching and building distributions. /// The database for fetching and building distributions.
@ -39,14 +39,14 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
/// Instantiate a new [`NamedRequirementsResolver`] for a given set of requirements. /// Instantiate a new [`NamedRequirementsResolver`] for a given set of requirements.
pub fn new( pub fn new(
requirements: Vec<RequirementsTxtRequirement>, requirements: Vec<RequirementsTxtRequirement>,
require_hashes: bool, hasher: &'a HashStrategy,
context: &'a Context, context: &'a Context,
client: &'a RegistryClient, client: &'a RegistryClient,
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
) -> Self { ) -> Self {
Self { Self {
requirements, requirements,
require_hashes, hasher,
index, index,
database: DistributionDatabase::new(client, context), database: DistributionDatabase::new(client, context),
} }
@ -65,7 +65,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
pub async fn resolve(self) -> Result<Vec<Requirement>> { pub async fn resolve(self) -> Result<Vec<Requirement>> {
let Self { let Self {
requirements, requirements,
require_hashes, hasher,
index, index,
database, database,
} = self; } = self;
@ -74,8 +74,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
match requirement { match requirement {
RequirementsTxtRequirement::Pep508(requirement) => Ok(requirement), RequirementsTxtRequirement::Pep508(requirement) => Ok(requirement),
RequirementsTxtRequirement::Unnamed(requirement) => { RequirementsTxtRequirement::Unnamed(requirement) => {
Self::resolve_requirement(requirement, require_hashes, index, &database) Self::resolve_requirement(requirement, hasher, index, &database).await
.await
} }
} }
}) })
@ -87,7 +86,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
/// Infer the package name for a given "unnamed" requirement. /// Infer the package name for a given "unnamed" requirement.
async fn resolve_requirement( async fn resolve_requirement(
requirement: UnnamedRequirement, requirement: UnnamedRequirement,
require_hashes: bool, hasher: &HashStrategy,
index: &InMemoryIndex, index: &InMemoryIndex,
database: &DistributionDatabase<'a, Context>, database: &DistributionDatabase<'a, Context>,
) -> Result<Requirement> { ) -> Result<Requirement> {
@ -240,13 +239,6 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
} }
}; };
// TODO(charlie): Support `--require-hashes` for unnamed requirements.
if require_hashes {
return Err(anyhow::anyhow!(
"Unnamed requirements are not supported with `--require-hashes`"
));
}
// Fetch the metadata for the distribution. // Fetch the metadata for the distribution.
let name = { let name = {
let id = PackageId::from_url(source.url()); let id = PackageId::from_url(source.url());
@ -260,9 +252,22 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
// If the metadata is already in the index, return it. // If the metadata is already in the index, return it.
archive.metadata.name.clone() archive.metadata.name.clone()
} else { } else {
// Determine the hash policy. Since we don't have a package name, we perform a
// manual match.
let hashes = match hasher {
HashStrategy::None => HashPolicy::None,
HashStrategy::Generate => HashPolicy::Generate,
HashStrategy::Validate(_) => {
// TODO(charlie): Support `--require-hashes` for unnamed requirements.
return Err(anyhow::anyhow!(
"Unnamed requirements are not supported with `--require-hashes`"
));
}
};
// Run the PEP 517 build process to extract metadata from the source distribution. // Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source); let source = BuildableSource::Url(source);
let archive = database.build_wheel_metadata(&source, &[]).await?; let archive = database.build_wheel_metadata(&source, hashes).await?;
let name = archive.metadata.name.clone(); let name = archive.metadata.name.clone();
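
A note on the hunk above: because an unnamed URL requirement has no package name yet, the resolver can only forward a global `None` or `Generate` policy and has to reject hash validation outright. A minimal, self-contained sketch of that dispatch, using simplified stand-ins rather than the real `uv_types::HashStrategy` and `distribution_types::HashPolicy` types:

```rust
use std::collections::HashMap;

/// Simplified stand-ins for the crate types; illustration only.
#[derive(Debug, Clone, PartialEq, Eq)]
enum HashPolicy {
    None,
    Generate,
    Validate(Vec<String>),
}

enum HashStrategy {
    None,
    Generate,
    /// Pinned digests, keyed by package name.
    Validate(HashMap<String, Vec<String>>),
}

/// Pick the hash policy for a requirement whose package name is still unknown.
fn policy_for_unnamed(hasher: &HashStrategy) -> Result<HashPolicy, String> {
    match hasher {
        HashStrategy::None => Ok(HashPolicy::None),
        HashStrategy::Generate => Ok(HashPolicy::Generate),
        // With no name to look the pinned digests up by, `--require-hashes`
        // rejects unnamed requirements up front.
        HashStrategy::Validate(_) => {
            Err("Unnamed requirements are not supported with `--require-hashes`".to_string())
        }
    }
}

fn main() {
    assert_eq!(policy_for_unnamed(&HashStrategy::Generate), Ok(HashPolicy::Generate));
    assert!(policy_for_unnamed(&HashStrategy::Validate(HashMap::new())).is_err());
}
```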


@ -6,8 +6,9 @@ use tracing::instrument;
use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename}; use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
use distribution_types::{ use distribution_types::{
BuiltDist, Dist, File, Hash, IncompatibleSource, IncompatibleWheel, IndexUrl, PrioritizedDist, BuiltDist, Dist, File, Hash, HashPolicy, IncompatibleSource, IncompatibleWheel, IndexUrl,
RegistryBuiltDist, RegistrySourceDist, SourceDist, SourceDistCompatibility, WheelCompatibility, PrioritizedDist, RegistryBuiltDist, RegistrySourceDist, SourceDist, SourceDistCompatibility,
WheelCompatibility,
}; };
use pep440_rs::Version; use pep440_rs::Version;
use platform_tags::{TagCompatibility, Tags}; use platform_tags::{TagCompatibility, Tags};
@ -15,7 +16,7 @@ use pypi_types::HashDigest;
use uv_client::FlatIndexEntries; use uv_client::FlatIndexEntries;
use uv_configuration::{NoBinary, NoBuild}; use uv_configuration::{NoBinary, NoBuild};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_types::RequiredHashes; use uv_types::HashStrategy;
/// A set of [`PrioritizedDist`] from a `--find-links` entry, indexed by [`PackageName`] /// A set of [`PrioritizedDist`] from a `--find-links` entry, indexed by [`PackageName`]
/// and [`Version`]. /// and [`Version`].
@ -34,7 +35,7 @@ impl FlatIndex {
pub fn from_entries( pub fn from_entries(
entries: FlatIndexEntries, entries: FlatIndexEntries,
tags: &Tags, tags: &Tags,
required_hashes: &RequiredHashes, hasher: &HashStrategy,
no_build: &NoBuild, no_build: &NoBuild,
no_binary: &NoBinary, no_binary: &NoBinary,
) -> Self { ) -> Self {
@ -47,7 +48,7 @@ impl FlatIndex {
file, file,
filename, filename,
tags, tags,
required_hashes, hasher,
no_build, no_build,
no_binary, no_binary,
url, url,
@ -66,7 +67,7 @@ impl FlatIndex {
file: File, file: File,
filename: DistFilename, filename: DistFilename,
tags: &Tags, tags: &Tags,
required_hashes: &RequiredHashes, hasher: &HashStrategy,
no_build: &NoBuild, no_build: &NoBuild,
no_binary: &NoBinary, no_binary: &NoBinary,
index: IndexUrl, index: IndexUrl,
@ -77,13 +78,8 @@ impl FlatIndex {
DistFilename::WheelFilename(filename) => { DistFilename::WheelFilename(filename) => {
let version = filename.version.clone(); let version = filename.version.clone();
let compatibility = Self::wheel_compatibility( let compatibility =
&filename, Self::wheel_compatibility(&filename, &file.hashes, tags, hasher, no_binary);
&file.hashes,
tags,
required_hashes,
no_binary,
);
let dist = Dist::Built(BuiltDist::Registry(RegistryBuiltDist { let dist = Dist::Built(BuiltDist::Registry(RegistryBuiltDist {
filename, filename,
file: Box::new(file), file: Box::new(file),
@ -99,12 +95,8 @@ impl FlatIndex {
} }
} }
DistFilename::SourceDistFilename(filename) => { DistFilename::SourceDistFilename(filename) => {
let compatibility = Self::source_dist_compatibility( let compatibility =
&filename, Self::source_dist_compatibility(&filename, &file.hashes, hasher, no_build);
&file.hashes,
required_hashes,
no_build,
);
let dist = Dist::Source(SourceDist::Registry(RegistrySourceDist { let dist = Dist::Source(SourceDist::Registry(RegistrySourceDist {
filename: filename.clone(), filename: filename.clone(),
file: Box::new(file), file: Box::new(file),
@ -125,7 +117,7 @@ impl FlatIndex {
fn source_dist_compatibility( fn source_dist_compatibility(
filename: &SourceDistFilename, filename: &SourceDistFilename,
hashes: &[HashDigest], hashes: &[HashDigest],
required_hashes: &RequiredHashes, hasher: &HashStrategy,
no_build: &NoBuild, no_build: &NoBuild,
) -> SourceDistCompatibility { ) -> SourceDistCompatibility {
// Check if source distributions are allowed for this package. // Check if source distributions are allowed for this package.
@ -140,10 +132,10 @@ impl FlatIndex {
} }
// Check if hashes line up // Check if hashes line up
let hash = if let Some(required_hashes) = required_hashes.get(&filename.name) { let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
if hashes.is_empty() { if hashes.is_empty() {
Hash::Missing Hash::Missing
} else if hashes.iter().any(|hash| required_hashes.contains(hash)) { } else if required.iter().any(|hash| hashes.contains(hash)) {
Hash::Matched Hash::Matched
} else { } else {
Hash::Mismatched Hash::Mismatched
@ -159,7 +151,7 @@ impl FlatIndex {
filename: &WheelFilename, filename: &WheelFilename,
hashes: &[HashDigest], hashes: &[HashDigest],
tags: &Tags, tags: &Tags,
required_hashes: &RequiredHashes, hasher: &HashStrategy,
no_binary: &NoBinary, no_binary: &NoBinary,
) -> WheelCompatibility { ) -> WheelCompatibility {
// Check if binaries are allowed for this package. // Check if binaries are allowed for this package.
@ -182,10 +174,10 @@ impl FlatIndex {
}; };
// Check if hashes line up // Check if hashes line up
let hash = if let Some(required_hashes) = required_hashes.get(&filename.name) { let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
if hashes.is_empty() { if hashes.is_empty() {
Hash::Missing Hash::Missing
} else if hashes.iter().any(|hash| required_hashes.contains(hash)) { } else if required.iter().any(|hash| hashes.contains(hash)) {
Hash::Matched Hash::Matched
} else { } else {
Hash::Mismatched Hash::Mismatched
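
The two hunks above keep the hash-compatibility rule intact while swapping the lookup to `hasher.get(&filename.name)`: when the policy is `Validate`, a file that publishes no digests is `Missing`, any overlap with the pinned digests is `Matched`, and everything else is `Mismatched`. A self-contained sketch of that classification, with plain strings standing in for `HashDigest`:

```rust
/// Simplified stand-in for `distribution_types::Hash`.
#[derive(Debug, PartialEq, Eq)]
enum Hash {
    Missing,
    Matched,
    Mismatched,
}

/// Compare a file's published digests against the digests pinned for the package.
fn check_hashes(required: &[&str], file_hashes: &[&str]) -> Hash {
    if file_hashes.is_empty() {
        Hash::Missing
    } else if required.iter().any(|hash| file_hashes.contains(hash)) {
        Hash::Matched
    } else {
        Hash::Mismatched
    }
}

fn main() {
    let required = ["sha256:aaa", "sha256:bbb"];
    assert_eq!(check_hashes(&required, &[]), Hash::Missing);
    assert_eq!(check_hashes(&required, &["sha256:bbb", "sha256:ccc"]), Hash::Matched);
    assert_eq!(check_hashes(&required, &["sha256:ddd"]), Hash::Mismatched);
}
```

Flipping the `any`/`contains` direction, as the diff does, doesn't change the outcome: only a non-empty intersection between the two digest sets matters.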


@ -1,15 +0,0 @@
#[derive(Debug, Default, Clone, Copy)]
pub enum HashCheckingMode {
/// Hash-checking mode is disabled.
#[default]
Disabled,
/// Hash-checking mode is enabled.
Enabled,
}
impl HashCheckingMode {
/// Returns `true` if hash-checking is enabled.
pub fn is_enabled(self) -> bool {
matches!(self, Self::Enabled)
}
}


@ -2,7 +2,6 @@ pub use dependency_mode::DependencyMode;
pub use error::ResolveError; pub use error::ResolveError;
pub use exclusions::Exclusions; pub use exclusions::Exclusions;
pub use flat_index::FlatIndex; pub use flat_index::FlatIndex;
pub use hash_checking_mode::HashCheckingMode;
pub use manifest::Manifest; pub use manifest::Manifest;
pub use options::{Options, OptionsBuilder}; pub use options::{Options, OptionsBuilder};
pub use preferences::{Preference, PreferenceError}; pub use preferences::{Preference, PreferenceError};
@ -27,7 +26,6 @@ mod editables;
mod error; mod error;
mod exclusions; mod exclusions;
mod flat_index; mod flat_index;
mod hash_checking_mode;
mod manifest; mod manifest;
mod options; mod options;
mod pins; mod pins;


@ -1,6 +1,5 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use crate::hash_checking_mode::HashCheckingMode;
use crate::{DependencyMode, PreReleaseMode, ResolutionMode}; use crate::{DependencyMode, PreReleaseMode, ResolutionMode};
/// Options for resolving a manifest. /// Options for resolving a manifest.
@ -9,7 +8,6 @@ pub struct Options {
pub resolution_mode: ResolutionMode, pub resolution_mode: ResolutionMode,
pub prerelease_mode: PreReleaseMode, pub prerelease_mode: PreReleaseMode,
pub dependency_mode: DependencyMode, pub dependency_mode: DependencyMode,
pub hash_checking_mode: HashCheckingMode,
pub exclude_newer: Option<DateTime<Utc>>, pub exclude_newer: Option<DateTime<Utc>>,
} }
@ -19,7 +17,6 @@ pub struct OptionsBuilder {
resolution_mode: ResolutionMode, resolution_mode: ResolutionMode,
prerelease_mode: PreReleaseMode, prerelease_mode: PreReleaseMode,
dependency_mode: DependencyMode, dependency_mode: DependencyMode,
hash_checking_mode: HashCheckingMode,
exclude_newer: Option<DateTime<Utc>>, exclude_newer: Option<DateTime<Utc>>,
} }
@ -50,13 +47,6 @@ impl OptionsBuilder {
self self
} }
/// Sets the hash-checking mode.
#[must_use]
pub fn hash_checking_mode(mut self, hash_checking_mode: HashCheckingMode) -> Self {
self.hash_checking_mode = hash_checking_mode;
self
}
/// Sets the exclusion date. /// Sets the exclusion date.
#[must_use] #[must_use]
pub fn exclude_newer(mut self, exclude_newer: Option<DateTime<Utc>>) -> Self { pub fn exclude_newer(mut self, exclude_newer: Option<DateTime<Utc>>) -> Self {
@ -70,7 +60,6 @@ impl OptionsBuilder {
resolution_mode: self.resolution_mode, resolution_mode: self.resolution_mode,
prerelease_mode: self.prerelease_mode, prerelease_mode: self.prerelease_mode,
dependency_mode: self.dependency_mode, dependency_mode: self.dependency_mode,
hash_checking_mode: self.hash_checking_mode,
exclude_newer: self.exclude_newer, exclude_newer: self.exclude_newer,
} }
} }


@ -134,15 +134,13 @@ impl ResolutionGraph {
.filter(|digests| !digests.is_empty()) .filter(|digests| !digests.is_empty())
{ {
hashes.insert(package_name.clone(), digests.to_vec()); hashes.insert(package_name.clone(), digests.to_vec());
} else if let Some(versions_response) = packages.get(package_name) { } else if let Some(metadata_response) =
if let VersionsResponse::Found(ref version_maps) = *versions_response { distributions.get(&pinned_package.package_id())
for version_map in version_maps { {
if let Some(mut digests) = version_map.hashes(version) { if let MetadataResponse::Found(ref archive) = *metadata_response {
digests.sort_unstable(); let mut digests = archive.hashes.clone();
hashes.insert(package_name.clone(), digests); digests.sort_unstable();
break; hashes.insert(package_name.clone(), digests);
}
}
} }
} }
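
This hunk is where URL requirements pick up hashes for the generated output: if the pinned file carries no registry digests, the graph now reads the hashes recorded on the distribution's fetched `MetadataResponse` archive rather than scanning the registry version maps. A rough sketch of that lookup order, with string keys and digests standing in for the real `PackageId`/`HashDigest` types:

```rust
use std::collections::HashMap;

/// Prefer the digests attached to the pinned registry file; otherwise fall back
/// to the hashes computed for the fetched or built archive. Sort for stable output.
fn hashes_for_package(
    pinned_digests: &[String],
    archives: &HashMap<String, Vec<String>>, // package id -> archive hashes
    package_id: &str,
) -> Vec<String> {
    let mut digests = if !pinned_digests.is_empty() {
        pinned_digests.to_vec()
    } else {
        archives.get(package_id).cloned().unwrap_or_default()
    };
    digests.sort_unstable();
    digests
}

fn main() {
    let archives = HashMap::from([(
        "anyio-4.2.0.tar.gz".to_string(),
        vec!["sha256:e1875bb4...".to_string()],
    )]);
    // A URL requirement has no registry digests, so the archive's hash is used.
    assert_eq!(
        hashes_for_package(&[], &archives, "anyio-4.2.0.tar.gz"),
        vec!["sha256:e1875bb4...".to_string()]
    );
}
```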


@ -31,12 +31,11 @@ use uv_configuration::{Constraints, Overrides};
use uv_distribution::{ArchiveMetadata, DistributionDatabase}; use uv_distribution::{ArchiveMetadata, DistributionDatabase};
use uv_interpreter::Interpreter; use uv_interpreter::Interpreter;
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_types::{BuildContext, InstalledPackagesProvider, RequiredHashes}; use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider};
use crate::candidate_selector::{CandidateDist, CandidateSelector}; use crate::candidate_selector::{CandidateDist, CandidateSelector};
use crate::editables::Editables; use crate::editables::Editables;
use crate::error::ResolveError; use crate::error::ResolveError;
use crate::hash_checking_mode::HashCheckingMode;
use crate::manifest::Manifest; use crate::manifest::Manifest;
use crate::pins::FilePins; use crate::pins::FilePins;
use crate::preferences::Preferences; use crate::preferences::Preferences;
@ -122,8 +121,7 @@ pub struct Resolver<
urls: Urls, urls: Urls,
locals: Locals, locals: Locals,
dependency_mode: DependencyMode, dependency_mode: DependencyMode,
hash_checking_mode: HashCheckingMode, hasher: &'a HashStrategy,
hashes: &'a RequiredHashes,
markers: &'a MarkerEnvironment, markers: &'a MarkerEnvironment,
python_requirement: PythonRequirement, python_requirement: PythonRequirement,
selector: CandidateSelector, selector: CandidateSelector,
@ -158,7 +156,7 @@ impl<
client: &'a RegistryClient, client: &'a RegistryClient,
flat_index: &'a FlatIndex, flat_index: &'a FlatIndex,
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
build_context: &'a Context, build_context: &'a Context,
installed_packages: &'a InstalledPackages, installed_packages: &'a InstalledPackages,
) -> Result<Self, ResolveError> { ) -> Result<Self, ResolveError> {
@ -169,7 +167,7 @@ impl<
tags, tags,
PythonRequirement::new(interpreter, markers), PythonRequirement::new(interpreter, markers),
AllowedYanks::from_manifest(&manifest, markers), AllowedYanks::from_manifest(&manifest, markers),
hashes, hasher,
options.exclude_newer, options.exclude_newer,
build_context.no_binary(), build_context.no_binary(),
build_context.no_build(), build_context.no_build(),
@ -177,7 +175,7 @@ impl<
Self::new_custom_io( Self::new_custom_io(
manifest, manifest,
options, options,
hashes, hasher,
markers, markers,
PythonRequirement::new(interpreter, markers), PythonRequirement::new(interpreter, markers),
index, index,
@ -198,7 +196,7 @@ impl<
pub fn new_custom_io( pub fn new_custom_io(
manifest: Manifest, manifest: Manifest,
options: Options, options: Options,
hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
markers: &'a MarkerEnvironment, markers: &'a MarkerEnvironment,
python_requirement: PythonRequirement, python_requirement: PythonRequirement,
index: &'a InMemoryIndex, index: &'a InMemoryIndex,
@ -212,7 +210,6 @@ impl<
visited: DashSet::default(), visited: DashSet::default(),
selector: CandidateSelector::for_resolution(options, &manifest, markers), selector: CandidateSelector::for_resolution(options, &manifest, markers),
dependency_mode: options.dependency_mode, dependency_mode: options.dependency_mode,
hash_checking_mode: options.hash_checking_mode,
urls: Urls::from_manifest(&manifest, markers)?, urls: Urls::from_manifest(&manifest, markers)?,
locals: Locals::from_manifest(&manifest, markers), locals: Locals::from_manifest(&manifest, markers),
project: manifest.project, project: manifest.project,
@ -222,7 +219,7 @@ impl<
preferences: Preferences::from_iter(manifest.preferences, markers), preferences: Preferences::from_iter(manifest.preferences, markers),
exclusions: manifest.exclusions, exclusions: manifest.exclusions,
editables: Editables::from_requirements(manifest.editables), editables: Editables::from_requirements(manifest.editables),
hashes, hasher,
markers, markers,
python_requirement, python_requirement,
reporter: None, reporter: None,
@ -528,10 +525,8 @@ impl<
PubGrubPackage::Python(_) => {} PubGrubPackage::Python(_) => {}
PubGrubPackage::Package(package_name, _extra, None) => { PubGrubPackage::Package(package_name, _extra, None) => {
// Validate that the package is permitted under hash-checking mode. // Validate that the package is permitted under hash-checking mode.
if self.hash_checking_mode.is_enabled() { if !self.hasher.allows(package_name) {
if !self.hashes.contains(package_name) { return Err(ResolveError::UnhashedPackage(package_name.clone()));
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}
} }
// Emit a request to fetch the metadata for this package. // Emit a request to fetch the metadata for this package.
@ -544,10 +539,8 @@ impl<
} }
PubGrubPackage::Package(package_name, _extra, Some(url)) => { PubGrubPackage::Package(package_name, _extra, Some(url)) => {
// Validate that the package is permitted under hash-checking mode. // Validate that the package is permitted under hash-checking mode.
if self.hash_checking_mode.is_enabled() { if !self.hasher.allows(package_name) {
if !self.hashes.contains(package_name) { return Err(ResolveError::UnhashedPackage(package_name.clone()));
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}
} }
// Emit a request to fetch the metadata for this distribution. // Emit a request to fetch the metadata for this distribution.
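
Both of these hunks replace the old two-step gate (`hash_checking_mode.is_enabled()` plus a `RequiredHashes` lookup) with a single `hasher.allows(package_name)` call: under validation, any package that reaches the resolver without pinned digests is rejected. A small sketch of that gate, assuming a simplified `HashStrategy` like the one introduced later in this diff:

```rust
use std::collections::HashMap;

enum HashStrategy {
    None,
    Generate,
    /// Pinned digests, keyed by package name.
    Validate(HashMap<String, Vec<String>>),
}

impl HashStrategy {
    /// Under `Validate`, only packages with pinned digests may be resolved.
    fn allows(&self, package_name: &str) -> bool {
        match self {
            Self::None | Self::Generate => true,
            Self::Validate(hashes) => hashes.contains_key(package_name),
        }
    }
}

/// Stand-in for the resolver's per-package visit: gate, then fetch metadata.
fn visit_package(hasher: &HashStrategy, package_name: &str) -> Result<(), String> {
    if !hasher.allows(package_name) {
        // Corresponds to `ResolveError::UnhashedPackage` in the hunks above.
        return Err(format!("package `{package_name}` has no pinned hash"));
    }
    // ...emit the metadata request here.
    Ok(())
}

fn main() {
    let pinned = HashMap::from([("anyio".to_string(), vec!["sha256:cfdb2b58...".to_string()])]);
    let hasher = HashStrategy::Validate(pinned);
    assert!(visit_package(&hasher, "anyio").is_ok());
    assert!(visit_package(&hasher, "idna").is_err());
}
```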


@ -10,7 +10,7 @@ use uv_client::RegistryClient;
use uv_configuration::{NoBinary, NoBuild}; use uv_configuration::{NoBinary, NoBuild};
use uv_distribution::{ArchiveMetadata, DistributionDatabase}; use uv_distribution::{ArchiveMetadata, DistributionDatabase};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_types::{BuildContext, RequiredHashes}; use uv_types::{BuildContext, HashStrategy};
use crate::flat_index::FlatIndex; use crate::flat_index::FlatIndex;
use crate::python_requirement::PythonRequirement; use crate::python_requirement::PythonRequirement;
@ -83,7 +83,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext + Send + Sync> {
tags: Tags, tags: Tags,
python_requirement: PythonRequirement, python_requirement: PythonRequirement,
allowed_yanks: AllowedYanks, allowed_yanks: AllowedYanks,
required_hashes: RequiredHashes, hasher: HashStrategy,
exclude_newer: Option<DateTime<Utc>>, exclude_newer: Option<DateTime<Utc>>,
no_binary: NoBinary, no_binary: NoBinary,
no_build: NoBuild, no_build: NoBuild,
@ -99,7 +99,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
tags: &'a Tags, tags: &'a Tags,
python_requirement: PythonRequirement, python_requirement: PythonRequirement,
allowed_yanks: AllowedYanks, allowed_yanks: AllowedYanks,
required_hashes: &'a RequiredHashes, hasher: &'a HashStrategy,
exclude_newer: Option<DateTime<Utc>>, exclude_newer: Option<DateTime<Utc>>,
no_binary: &'a NoBinary, no_binary: &'a NoBinary,
no_build: &'a NoBuild, no_build: &'a NoBuild,
@ -111,7 +111,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
tags: tags.clone(), tags: tags.clone(),
python_requirement, python_requirement,
allowed_yanks, allowed_yanks,
required_hashes: required_hashes.clone(), hasher: hasher.clone(),
exclude_newer, exclude_newer,
no_binary: no_binary.clone(), no_binary: no_binary.clone(),
no_build: no_build.clone(), no_build: no_build.clone(),
@ -139,7 +139,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
&self.tags, &self.tags,
&self.python_requirement, &self.python_requirement,
&self.allowed_yanks, &self.allowed_yanks,
&self.required_hashes, &self.hasher,
self.exclude_newer.as_ref(), self.exclude_newer.as_ref(),
self.flat_index.get(package_name).cloned(), self.flat_index.get(package_name).cloned(),
&self.no_binary, &self.no_binary,
@ -179,8 +179,11 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
/// Fetch the metadata for a distribution, building it if necessary. /// Fetch the metadata for a distribution, building it if necessary.
async fn get_or_build_wheel_metadata<'io>(&'io self, dist: &'io Dist) -> WheelMetadataResult { async fn get_or_build_wheel_metadata<'io>(&'io self, dist: &'io Dist) -> WheelMetadataResult {
let hashes = self.required_hashes.get(dist.name()).unwrap_or_default(); match self
match self.fetcher.get_or_build_wheel_metadata(dist, hashes).await { .fetcher
.get_or_build_wheel_metadata(dist, self.hasher.get(dist.name()))
.await
{
Ok(metadata) => Ok(MetadataResponse::Found(metadata)), Ok(metadata) => Ok(MetadataResponse::Found(metadata)),
Err(err) => match err { Err(err) => match err {
uv_distribution::Error::Client(client) => match client.into_kind() { uv_distribution::Error::Client(client) => match client.into_kind() {


@ -17,7 +17,7 @@ use pypi_types::{HashDigest, Yanked};
use uv_client::{OwnedArchive, SimpleMetadata, VersionFiles}; use uv_client::{OwnedArchive, SimpleMetadata, VersionFiles};
use uv_configuration::{NoBinary, NoBuild}; use uv_configuration::{NoBinary, NoBuild};
use uv_normalize::PackageName; use uv_normalize::PackageName;
use uv_types::RequiredHashes; use uv_types::HashStrategy;
use uv_warnings::warn_user_once; use uv_warnings::warn_user_once;
use crate::flat_index::FlatDistributions; use crate::flat_index::FlatDistributions;
@ -48,7 +48,7 @@ impl VersionMap {
tags: &Tags, tags: &Tags,
python_requirement: &PythonRequirement, python_requirement: &PythonRequirement,
allowed_yanks: &AllowedYanks, allowed_yanks: &AllowedYanks,
required_hashes: &RequiredHashes, hasher: &HashStrategy,
exclude_newer: Option<&DateTime<Utc>>, exclude_newer: Option<&DateTime<Utc>>,
flat_index: Option<FlatDistributions>, flat_index: Option<FlatDistributions>,
no_binary: &NoBinary, no_binary: &NoBinary,
@ -112,10 +112,7 @@ impl VersionMap {
.allowed_versions(package_name) .allowed_versions(package_name)
.cloned() .cloned()
.unwrap_or_default(); .unwrap_or_default();
let required_hashes = required_hashes let required_hashes = hasher.get(package_name).digests().to_vec();
.get(package_name)
.unwrap_or_default()
.to_vec();
Self { Self {
inner: VersionMapInner::Lazy(VersionMapLazy { inner: VersionMapInner::Lazy(VersionMapLazy {
map, map,


@ -22,7 +22,7 @@ use uv_resolver::{
OptionsBuilder, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver, OptionsBuilder, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
}; };
use uv_types::{ use uv_types::{
BuildContext, BuildIsolation, EmptyInstalledPackages, RequiredHashes, SourceBuildTrait, BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy, SourceBuildTrait,
}; };
// Exclude any packages uploaded after this date. // Exclude any packages uploaded after this date.
@ -125,7 +125,7 @@ async fn resolve(
find_default_python(&Cache::temp().unwrap()).expect("Expected a python to be installed"); find_default_python(&Cache::temp().unwrap()).expect("Expected a python to be installed");
let interpreter = Interpreter::artificial(real_interpreter.platform().clone(), markers.clone()); let interpreter = Interpreter::artificial(real_interpreter.platform().clone(), markers.clone());
let build_context = DummyContext::new(Cache::temp()?, interpreter.clone()); let build_context = DummyContext::new(Cache::temp()?, interpreter.clone());
let hashes = RequiredHashes::default(); let hashes = HashStrategy::None;
let installed_packages = EmptyInstalledPackages; let installed_packages = EmptyInstalledPackages;
let resolver = Resolver::new( let resolver = Resolver::new(
manifest, manifest,


@ -1,3 +1,4 @@
use distribution_types::HashPolicy;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use std::str::FromStr; use std::str::FromStr;
@ -5,18 +6,45 @@ use pep508_rs::{MarkerEnvironment, Requirement, VersionOrUrl};
use pypi_types::{HashDigest, HashError}; use pypi_types::{HashDigest, HashError};
use uv_normalize::PackageName; use uv_normalize::PackageName;
/// A set of package versions that are permitted, even if they're marked as yanked by the #[derive(Debug, Clone)]
/// relevant index. pub enum HashStrategy {
#[derive(Debug, Default, Clone)] /// No hash policy is specified.
pub struct RequiredHashes(FxHashMap<PackageName, Vec<HashDigest>>); None,
/// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
Generate,
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
/// be generated so as to ensure that the archive is valid.
Validate(FxHashMap<PackageName, Vec<HashDigest>>),
}
impl RequiredHashes { impl HashStrategy {
/// Generate the [`RequiredHashes`] from a set of requirement entries. /// Return the [`HashPolicy`] for the given package.
pub fn get(&self, package_name: &PackageName) -> HashPolicy {
match self {
Self::None => HashPolicy::None,
Self::Generate => HashPolicy::Generate,
Self::Validate(hashes) => hashes
.get(package_name)
.map(Vec::as_slice)
.map_or(HashPolicy::None, HashPolicy::Validate),
}
}
/// Returns `true` if the given package is allowed.
pub fn allows(&self, package_name: &PackageName) -> bool {
match self {
Self::None => true,
Self::Generate => true,
Self::Validate(hashes) => hashes.contains_key(package_name),
}
}
/// Generate the required hashes from a set of [`Requirement`] entries.
pub fn from_requirements( pub fn from_requirements(
requirements: impl Iterator<Item = (Requirement, Vec<String>)>, requirements: impl Iterator<Item = (Requirement, Vec<String>)>,
markers: &MarkerEnvironment, markers: &MarkerEnvironment,
) -> Result<Self, RequiredHashesError> { ) -> Result<Self, HashStrategyError> {
let mut allowed_hashes = FxHashMap::<PackageName, Vec<HashDigest>>::default(); let mut hashes = FxHashMap::<PackageName, Vec<HashDigest>>::default();
// For each requirement, map from name to allowed hashes. We use the last entry for each // For each requirement, map from name to allowed hashes. We use the last entry for each
// package. // package.
@ -26,7 +54,7 @@ impl RequiredHashes {
// //
// TODO(charlie): Preserve hashes from `requirements.txt` through to this pass, so that we // TODO(charlie): Preserve hashes from `requirements.txt` through to this pass, so that we
// can iterate over requirements directly, rather than iterating over the entries. // can iterate over requirements directly, rather than iterating over the entries.
for (requirement, hashes) in requirements { for (requirement, digests) in requirements {
if !requirement.evaluate_markers(markers, &[]) { if !requirement.evaluate_markers(markers, &[]) {
continue; continue;
} }
@ -43,51 +71,40 @@ impl RequiredHashes {
{ {
// Pinned versions are allowed. // Pinned versions are allowed.
} else { } else {
return Err(RequiredHashesError::UnpinnedRequirement( return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(), requirement.to_string(),
)); ));
} }
} }
None => { None => {
return Err(RequiredHashesError::UnpinnedRequirement( return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(), requirement.to_string(),
)) ))
} }
} }
// Every requirement must include a hash. // Every requirement must include a hash.
if hashes.is_empty() { if digests.is_empty() {
return Err(RequiredHashesError::MissingHashes(requirement.to_string())); return Err(HashStrategyError::MissingHashes(requirement.to_string()));
} }
// Parse the hashes. // Parse the hashes.
let hashes = hashes let digests = digests
.iter() .iter()
.map(|hash| HashDigest::from_str(hash)) .map(|digest| HashDigest::from_str(digest))
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.unwrap(); .unwrap();
// TODO(charlie): Extract hashes from URL fragments. // TODO(charlie): Extract hashes from URL fragments.
allowed_hashes.insert(requirement.name, hashes); hashes.insert(requirement.name, digests);
} }
Ok(Self(allowed_hashes)) Ok(Self::Validate(hashes))
}
/// Returns versions for the given package which are allowed even if marked as yanked by the
/// relevant index.
pub fn get(&self, package_name: &PackageName) -> Option<&[HashDigest]> {
self.0.get(package_name).map(Vec::as_slice)
}
/// Returns whether the given package is allowed even if marked as yanked by the relevant index.
pub fn contains(&self, package_name: &PackageName) -> bool {
self.0.contains_key(package_name)
} }
} }
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum RequiredHashesError { pub enum HashStrategyError {
#[error(transparent)] #[error(transparent)]
Hash(#[from] HashError), Hash(#[from] HashError),
#[error("Unnamed requirements are not supported in `--require-hashes`")] #[error("Unnamed requirements are not supported in `--require-hashes`")]
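
Pulling the new `hash.rs` together: under `--require-hashes`, a `Validate` map is built from the requirement entries (each must be pinned to an exact version and carry at least one digest), and per-package lookups then go through `get`, which returns a `HashPolicy`. A compact, self-contained sketch of that construction and lookup, with plain strings standing in for `Requirement` and `HashDigest` and marker evaluation omitted:

```rust
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum HashPolicy {
    None,
    Generate,
    Validate(Vec<String>),
}

enum HashStrategy {
    None,
    Generate,
    Validate(HashMap<String, Vec<String>>),
}

impl HashStrategy {
    /// Build a `Validate` strategy from `(name, pinned version, digests)` entries.
    fn from_entries(entries: Vec<(&str, Option<&str>, Vec<&str>)>) -> Result<Self, String> {
        let mut hashes = HashMap::new();
        for (name, version, digests) in entries {
            // Every requirement must be pinned to an exact version...
            if version.is_none() {
                return Err(format!("requirement `{name}` is not pinned to an exact version"));
            }
            // ...and must include at least one digest.
            if digests.is_empty() {
                return Err(format!("requirement `{name}` is missing a hash"));
            }
            hashes.insert(name.to_string(), digests.iter().map(|d| d.to_string()).collect());
        }
        Ok(Self::Validate(hashes))
    }

    /// Return the policy for a package: its pinned digests if present, else `None`.
    fn get(&self, package_name: &str) -> HashPolicy {
        match self {
            Self::None => HashPolicy::None,
            Self::Generate => HashPolicy::Generate,
            Self::Validate(hashes) => hashes
                .get(package_name)
                .cloned()
                .map_or(HashPolicy::None, HashPolicy::Validate),
        }
    }
}

fn main() -> Result<(), String> {
    let strategy = HashStrategy::from_entries(vec![("idna", Some("3.6"), vec!["sha256:9ecdbbd0..."])])?;
    assert_eq!(
        strategy.get("idna"),
        HashPolicy::Validate(vec!["sha256:9ecdbbd0...".to_string()])
    );
    assert_eq!(strategy.get("sniffio"), HashPolicy::None);
    Ok(())
}
```

The real implementation returns borrowed digests (`HashPolicy::Validate(&[HashDigest])`) rather than cloning, and it evaluates environment markers before inserting an entry, as shown in the hunks above.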


@ -1,12 +1,12 @@
//! Fundamental types shared across `uv` crates. //! Fundamental types shared across `uv` crates.
pub use builds::*; pub use builds::*;
pub use downloads::*; pub use downloads::*;
pub use hashes::*; pub use hash::*;
pub use requirements::*; pub use requirements::*;
pub use traits::*; pub use traits::*;
mod builds; mod builds;
mod downloads; mod downloads;
mod hashes; mod hash;
mod requirements; mod requirements;
mod traits; mod traits;


@ -38,7 +38,7 @@ use uv_resolver::{
Manifest, OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver, Manifest, OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver,
}; };
use uv_toolchain::PythonVersion; use uv_toolchain::PythonVersion;
use uv_types::{BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes}; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
use uv_warnings::warn_user; use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, ResolverReporter}; use crate::commands::reporters::{DownloadReporter, ResolverReporter};
@ -199,8 +199,12 @@ pub(crate) async fn pip_compile(
|python_version| Cow::Owned(python_version.markers(interpreter.markers())), |python_version| Cow::Owned(python_version.markers(interpreter.markers())),
); );
// Don't enforce hashes during resolution. // Generate, but don't enforce hashes for the requirements.
let hashes = RequiredHashes::default(); let hasher = if generate_hashes {
HashStrategy::Generate
} else {
HashStrategy::None
};
// Incorporate any index locations from the provided sources. // Incorporate any index locations from the provided sources.
let index_locations = let index_locations =
@ -233,7 +237,7 @@ pub(crate) async fn pip_compile(
let flat_index = { let flat_index = {
let client = FlatIndexClient::new(&client, &cache); let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?; let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, &tags, &hashes, &no_build, &NoBinary::None) FlatIndex::from_entries(entries, &tags, &hasher, &no_build, &NoBinary::None)
}; };
// Track in-flight downloads, builds, etc., across resolutions. // Track in-flight downloads, builds, etc., across resolutions.
@ -272,7 +276,7 @@ pub(crate) async fn pip_compile(
// Convert from unnamed to named requirements. // Convert from unnamed to named requirements.
let mut requirements = NamedRequirementsResolver::new( let mut requirements = NamedRequirementsResolver::new(
requirements, requirements,
false, &hasher,
&build_dispatch, &build_dispatch,
&client, &client,
&top_level_index, &top_level_index,
@ -287,7 +291,7 @@ pub(crate) async fn pip_compile(
SourceTreeResolver::new( SourceTreeResolver::new(
source_trees, source_trees,
&extras, &extras,
false, &hasher,
&build_dispatch, &build_dispatch,
&client, &client,
&top_level_index, &top_level_index,
@ -312,7 +316,7 @@ pub(crate) async fn pip_compile(
LocalEditable { url, path, extras } LocalEditable { url, path, extras }
})); }));
let downloader = Downloader::new(&cache, &tags, &hashes, &client, &build_dispatch) let downloader = Downloader::new(&cache, &tags, &hasher, &client, &build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64)); .with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
// Build all editables. // Build all editables.
@ -360,7 +364,7 @@ pub(crate) async fn pip_compile(
&constraints, &constraints,
&overrides, &overrides,
&editables, &editables,
&hashes, &hasher,
&build_dispatch, &build_dispatch,
&client, &client,
&top_level_index, &top_level_index,
@ -399,7 +403,7 @@ pub(crate) async fn pip_compile(
&client, &client,
&flat_index, &flat_index,
&top_level_index, &top_level_index,
&hashes, &hasher,
&build_dispatch, &build_dispatch,
&EmptyInstalledPackages, &EmptyInstalledPackages,
)? )?
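
For `pip compile`, the change above boils down to: `--generate-hashes` selects `HashStrategy::Generate`, that same `hasher` is threaded through the flat index, the named-requirements and source-tree resolvers, the editable downloader, and the resolver, and every resolved archive then gets a SHA-256 digest in the output (the `--hash=sha256:...` lines in the snapshots below). As a sketch of what producing such a digest involves, here is a stand-alone example using the `sha2` crate; this illustrates the digest format only and is not a claim about which hashing code path uv uses internally:

```rust
// Cargo.toml (for this sketch only): sha2 = "0.10"
use sha2::{Digest, Sha256};

/// Hash an archive's bytes and render the digest in `sha256:<hex>` form.
fn generate_sha256(archive_bytes: &[u8]) -> String {
    let mut hasher = Sha256::new();
    hasher.update(archive_bytes);
    let digest = hasher.finalize();
    let hex: String = digest.iter().map(|byte| format!("{byte:02x}")).collect();
    format!("sha256:{hex}")
}

fn main() {
    let digest = generate_sha256(b"example archive contents");
    assert!(digest.starts_with("sha256:"));
    println!("--hash={digest}");
}
```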


@ -1,4 +1,5 @@
use std::fmt::Write; use std::fmt::Write;
use std::path::Path; use std::path::Path;
use anstream::eprint; use anstream::eprint;
@ -37,10 +38,10 @@ use uv_requirements::{
RequirementsSpecification, SourceTreeResolver, RequirementsSpecification, SourceTreeResolver,
}; };
use uv_resolver::{ use uv_resolver::{
DependencyMode, Exclusions, FlatIndex, HashCheckingMode, InMemoryIndex, Manifest, Options, DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, OptionsBuilder,
OptionsBuilder, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
}; };
use uv_types::{BuildIsolation, InFlight, RequiredHashes}; use uv_types::{BuildIsolation, HashStrategy, InFlight};
use uv_warnings::warn_user; use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter}; use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
@ -186,8 +187,8 @@ pub(crate) async fn pip_install(
let markers = venv.interpreter().markers(); let markers = venv.interpreter().markers();
// Collect the set of required hashes. // Collect the set of required hashes.
let hashes = if require_hashes { let hasher = if require_hashes {
RequiredHashes::from_requirements( HashStrategy::from_requirements(
entries entries
.into_iter() .into_iter()
.filter_map(|requirement| match requirement.requirement { .filter_map(|requirement| match requirement.requirement {
@ -197,7 +198,7 @@ pub(crate) async fn pip_install(
markers, markers,
)? )?
} else { } else {
RequiredHashes::default() HashStrategy::None
}; };
// Incorporate any index locations from the provided sources. // Incorporate any index locations from the provided sources.
@ -224,7 +225,7 @@ pub(crate) async fn pip_install(
let flat_index = { let flat_index = {
let client = FlatIndexClient::new(&client, &cache); let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?; let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, tags, &hashes, &no_build, &no_binary) FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
}; };
// Determine whether to enable build isolation. // Determine whether to enable build isolation.
@ -266,7 +267,7 @@ pub(crate) async fn pip_install(
// Convert from unnamed to named requirements. // Convert from unnamed to named requirements.
let mut requirements = NamedRequirementsResolver::new( let mut requirements = NamedRequirementsResolver::new(
requirements, requirements,
require_hashes, &hasher,
&resolve_dispatch, &resolve_dispatch,
&client, &client,
&index, &index,
@ -281,7 +282,7 @@ pub(crate) async fn pip_install(
SourceTreeResolver::new( SourceTreeResolver::new(
source_trees, source_trees,
extras, extras,
require_hashes, &hasher,
&resolve_dispatch, &resolve_dispatch,
&client, &client,
&index, &index,
@ -306,7 +307,7 @@ pub(crate) async fn pip_install(
build_editables( build_editables(
&editables, &editables,
editable_wheel_dir.path(), editable_wheel_dir.path(),
&hashes, &hasher,
&cache, &cache,
&interpreter, &interpreter,
tags, tags,
@ -321,11 +322,6 @@ pub(crate) async fn pip_install(
.resolution_mode(resolution_mode) .resolution_mode(resolution_mode)
.prerelease_mode(prerelease_mode) .prerelease_mode(prerelease_mode)
.dependency_mode(dependency_mode) .dependency_mode(dependency_mode)
.hash_checking_mode(if require_hashes {
HashCheckingMode::Enabled
} else {
HashCheckingMode::Disabled
})
.exclude_newer(exclude_newer) .exclude_newer(exclude_newer)
.build(); .build();
@ -336,7 +332,7 @@ pub(crate) async fn pip_install(
overrides, overrides,
project, project,
&editables, &editables,
&hashes, &hasher,
&site_packages, &site_packages,
&reinstall, &reinstall,
&upgrade, &upgrade,
@ -397,7 +393,7 @@ pub(crate) async fn pip_install(
link_mode, link_mode,
compile, compile,
&index_locations, &index_locations,
&hashes, &hasher,
tags, tags,
&client, &client,
&in_flight, &in_flight,
@ -473,7 +469,7 @@ async fn read_requirements(
async fn build_editables( async fn build_editables(
editables: &[EditableRequirement], editables: &[EditableRequirement],
editable_wheel_dir: &Path, editable_wheel_dir: &Path,
hashes: &RequiredHashes, hasher: &HashStrategy,
cache: &Cache, cache: &Cache,
interpreter: &Interpreter, interpreter: &Interpreter,
tags: &Tags, tags: &Tags,
@ -483,7 +479,7 @@ async fn build_editables(
) -> Result<Vec<BuiltEditable>, Error> { ) -> Result<Vec<BuiltEditable>, Error> {
let start = std::time::Instant::now(); let start = std::time::Instant::now();
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch) let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64)); .with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
let editables = LocalEditables::from_editables(editables.iter().map(|editable| { let editables = LocalEditables::from_editables(editables.iter().map(|editable| {
@ -540,7 +536,7 @@ async fn resolve(
overrides: Vec<Requirement>, overrides: Vec<Requirement>,
project: Option<PackageName>, project: Option<PackageName>,
editables: &[BuiltEditable], editables: &[BuiltEditable],
hashes: &RequiredHashes, hasher: &HashStrategy,
site_packages: &SitePackages<'_>, site_packages: &SitePackages<'_>,
reinstall: &Reinstall, reinstall: &Reinstall,
upgrade: &Upgrade, upgrade: &Upgrade,
@ -587,7 +583,7 @@ async fn resolve(
&constraints, &constraints,
&overrides, &overrides,
&editables, &editables,
hashes, hasher,
build_dispatch, build_dispatch,
client, client,
index, index,
@ -618,7 +614,7 @@ async fn resolve(
client, client,
flat_index, flat_index,
index, index,
hashes, hasher,
build_dispatch, build_dispatch,
site_packages, site_packages,
)? )?
@ -662,7 +658,7 @@ async fn install(
link_mode: LinkMode, link_mode: LinkMode,
compile: bool, compile: bool,
index_urls: &IndexLocations, index_urls: &IndexLocations,
hashes: &RequiredHashes, hasher: &HashStrategy,
tags: &Tags, tags: &Tags,
client: &RegistryClient, client: &RegistryClient,
in_flight: &InFlight, in_flight: &InFlight,
@ -690,7 +686,7 @@ async fn install(
site_packages, site_packages,
reinstall, reinstall,
no_binary, no_binary,
hashes, hasher,
index_urls, index_urls,
cache, cache,
venv, venv,
@ -743,7 +739,7 @@ async fn install(
} else { } else {
let start = std::time::Instant::now(); let start = std::time::Instant::now();
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch) let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64)); .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader let wheels = downloader
@ -1060,7 +1056,7 @@ enum Error {
Platform(#[from] platform_tags::PlatformError), Platform(#[from] platform_tags::PlatformError),
#[error(transparent)] #[error(transparent)]
RequiredHashes(#[from] uv_types::RequiredHashesError), Hash(#[from] uv_types::HashStrategyError),
#[error(transparent)] #[error(transparent)]
Io(#[from] std::io::Error), Io(#[from] std::io::Error),


@ -30,10 +30,8 @@ use uv_requirements::{
ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification, ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
SourceTreeResolver, SourceTreeResolver,
}; };
use uv_resolver::{ use uv_resolver::{DependencyMode, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, Resolver};
DependencyMode, FlatIndex, HashCheckingMode, InMemoryIndex, Manifest, OptionsBuilder, Resolver, use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
};
use uv_types::{BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes};
use uv_warnings::warn_user; use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter}; use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
@ -138,8 +136,8 @@ pub(crate) async fn pip_sync(
let markers = venv.interpreter().markers(); let markers = venv.interpreter().markers();
// Collect the set of required hashes. // Collect the set of required hashes.
let hashes = if require_hashes { let hasher = if require_hashes {
RequiredHashes::from_requirements( HashStrategy::from_requirements(
entries entries
.into_iter() .into_iter()
.filter_map(|requirement| match requirement.requirement { .filter_map(|requirement| match requirement.requirement {
@ -149,7 +147,7 @@ pub(crate) async fn pip_sync(
markers, markers,
)? )?
} else { } else {
RequiredHashes::default() HashStrategy::None
}; };
// Incorporate any index locations from the provided sources. // Incorporate any index locations from the provided sources.
@ -176,7 +174,7 @@ pub(crate) async fn pip_sync(
let flat_index = { let flat_index = {
let client = FlatIndexClient::new(&client, &cache); let client = FlatIndexClient::new(&client, &cache);
let entries = client.fetch(index_locations.flat_index()).await?; let entries = client.fetch(index_locations.flat_index()).await?;
FlatIndex::from_entries(entries, tags, &hashes, &no_build, &no_binary) FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
}; };
// Create a shared in-memory index. // Create a shared in-memory index.
@ -218,16 +216,11 @@ pub(crate) async fn pip_sync(
// Convert from unnamed to named requirements. // Convert from unnamed to named requirements.
let requirements = { let requirements = {
// Convert from unnamed to named requirements. // Convert from unnamed to named requirements.
let mut requirements = NamedRequirementsResolver::new( let mut requirements =
requirements, NamedRequirementsResolver::new(requirements, &hasher, &build_dispatch, &client, &index)
require_hashes, .with_reporter(ResolverReporter::from(printer))
&build_dispatch, .resolve()
&client, .await?;
&index,
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
.await?;
// Resolve any source trees into requirements. // Resolve any source trees into requirements.
if !source_trees.is_empty() { if !source_trees.is_empty() {
@ -235,7 +228,7 @@ pub(crate) async fn pip_sync(
SourceTreeResolver::new( SourceTreeResolver::new(
source_trees, source_trees,
&ExtrasSpecification::None, &ExtrasSpecification::None,
require_hashes, &hasher,
&build_dispatch, &build_dispatch,
&client, &client,
&index, &index,
@ -254,7 +247,7 @@ pub(crate) async fn pip_sync(
editables, editables,
&site_packages, &site_packages,
reinstall, reinstall,
&hashes, &hasher,
venv.interpreter(), venv.interpreter(),
tags, tags,
&cache, &cache,
@ -278,7 +271,7 @@ pub(crate) async fn pip_sync(
site_packages, site_packages,
reinstall, reinstall,
&no_binary, &no_binary,
&hashes, &hasher,
&index_locations, &index_locations,
&cache, &cache,
&venv, &venv,
@ -317,11 +310,6 @@ pub(crate) async fn pip_sync(
// Resolve with `--no-deps`. // Resolve with `--no-deps`.
let options = OptionsBuilder::new() let options = OptionsBuilder::new()
.dependency_mode(DependencyMode::Direct) .dependency_mode(DependencyMode::Direct)
.hash_checking_mode(if require_hashes {
HashCheckingMode::Enabled
} else {
HashCheckingMode::Disabled
})
.build(); .build();
// Create a bound on the progress bar, since we know the number of packages upfront. // Create a bound on the progress bar, since we know the number of packages upfront.
@ -337,7 +325,7 @@ pub(crate) async fn pip_sync(
&client, &client,
&flat_index, &flat_index,
&index, &index,
&hashes, &hasher,
&build_dispatch, &build_dispatch,
// TODO(zanieb): We should consider support for installed packages in pip sync // TODO(zanieb): We should consider support for installed packages in pip sync
&EmptyInstalledPackages, &EmptyInstalledPackages,
@ -381,7 +369,7 @@ pub(crate) async fn pip_sync(
} else { } else {
let start = std::time::Instant::now(); let start = std::time::Instant::now();
let downloader = Downloader::new(&cache, tags, &hashes, &client, &build_dispatch) let downloader = Downloader::new(&cache, tags, &hasher, &client, &build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64)); .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader let wheels = downloader
@ -577,7 +565,7 @@ async fn resolve_editables(
editables: Vec<EditableRequirement>, editables: Vec<EditableRequirement>,
site_packages: &SitePackages<'_>, site_packages: &SitePackages<'_>,
reinstall: &Reinstall, reinstall: &Reinstall,
hashes: &RequiredHashes, hasher: &HashStrategy,
interpreter: &Interpreter, interpreter: &Interpreter,
tags: &Tags, tags: &Tags,
cache: &Cache, cache: &Cache,
@ -644,7 +632,7 @@ async fn resolve_editables(
} else { } else {
let start = std::time::Instant::now(); let start = std::time::Instant::now();
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch) let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
.with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64)); .with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64));
let editables = LocalEditables::from_editables(uninstalled.iter().map(|editable| { let editables = LocalEditables::from_editables(uninstalled.iter().map(|editable| {

View file

@ -21,7 +21,7 @@ use uv_dispatch::BuildDispatch;
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_interpreter::{find_default_python, find_requested_python, Error}; use uv_interpreter::{find_default_python, find_requested_python, Error};
use uv_resolver::{FlatIndex, InMemoryIndex, OptionsBuilder}; use uv_resolver::{FlatIndex, InMemoryIndex, OptionsBuilder};
use uv_types::{BuildContext, BuildIsolation, InFlight, RequiredHashes}; use uv_types::{BuildContext, BuildIsolation, HashStrategy, InFlight};
use crate::commands::ExitStatus; use crate::commands::ExitStatus;
use crate::printer::Printer; use crate::printer::Printer;
@ -170,7 +170,7 @@ async fn venv_impl(
FlatIndex::from_entries( FlatIndex::from_entries(
entries, entries,
tags, tags,
&RequiredHashes::default(), &HashStrategy::None,
&NoBuild::All, &NoBuild::All,
&NoBinary::None, &NoBinary::None,
) )


@ -3447,33 +3447,14 @@ fn compile_legacy_sdist_setuptools() -> Result<()> {
Ok(()) Ok(())
} }
/// Include hashes in the generated output. /// Include hashes from the registry in the generated output.
#[test] #[test]
fn generate_hashes() -> Result<()> { fn generate_hashes_registry() -> Result<()> {
let context = TestContext::new("3.12"); let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in"); let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?; requirements_in.write_str("anyio==4.0.0")?;
let colorama_locked = regex::escape(indoc! {r" uv_snapshot!(context.compile()
colorama==0.4.6 \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
# via click
"});
let filters: Vec<_> = if cfg!(windows) {
// Remove colorama
vec![
(colorama_locked.as_str(), ""),
("Resolved 8 packages", "Resolved 7 packages"),
]
} else {
vec![]
}
.into_iter()
.chain(context.filters())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in") .arg("requirements.in")
.arg("--generate-hashes"), @r###" .arg("--generate-hashes"), @r###"
success: true success: true
@ -3481,102 +3462,170 @@ fn generate_hashes() -> Result<()> {
----- stdout ----- ----- stdout -----
# This file was autogenerated by uv via the following command: # This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes # uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
blinker==1.7.0 \ anyio==4.0.0 \
--hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \ --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182 --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
# via flask idna==3.6 \
click==8.1.7 \ --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via anyio
# via flask sniffio==1.3.1 \
flask==3.0.0 \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
--hash=sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58 # via anyio
itsdangerous==2.1.2 \
--hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
--hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a
# via flask
jinja2==3.1.3 \
--hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \
--hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90
# via flask
markupsafe==2.1.5 \
--hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
--hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
--hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
--hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
--hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
--hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
--hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
--hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
--hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
--hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
--hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
--hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
--hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
--hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
--hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
--hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
--hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
--hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
--hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
--hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
--hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
--hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
--hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
--hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
--hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
--hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
--hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
--hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
--hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
--hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
--hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
--hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
--hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
--hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
--hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
--hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
--hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
--hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
--hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
--hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
--hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
--hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
--hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
--hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
--hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
--hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
--hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
--hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
--hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
--hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
--hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
--hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
--hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
--hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
--hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
--hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
# via
# jinja2
# werkzeug
werkzeug==3.0.1 \
--hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
--hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
# via flask
----- stderr ----- ----- stderr -----
Resolved 7 packages in [TIME] Resolved 3 packages in [TIME]
"### "###
); );
Ok(()) Ok(())
} }
/// Include hashes from the URL in the generated output.
#[test]
fn generate_hashes_source_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz \
--hash=sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the URL in the generated output.
#[test]
fn generate_hashes_built_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given an unnamed URL, include hashes for the URL and its dependencies.
#[test]
fn generate_hashes_unnamed_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given a local directory, include hashes for its dependencies, but not the directory itself.
#[test]
fn generate_hashes_local_directory() -> Result<()> {
let _context = TestContext::new("3.12");
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/packages/poetry_editable
"
})?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.arg("--generate-hashes")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --generate-hashes
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
# via poetry-editable
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
poetry-editable @ ../../scripts/packages/poetry_editable
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a local directory. /// Compile using `--find-links` with a local directory.
#[test] #[test]
fn find_links_directory() -> Result<()> { fn find_links_directory() -> Result<()> {