Mirror of https://github.com/astral-sh/uv.git (synced 2025-08-04 19:08:04 +00:00)
Add support for URL requirements in `--generate-hashes` (#2952)
## Summary

This PR enables hash generation for URL requirements when the user provides `--generate-hashes` to `pip compile`. While we already include hashes from the registry, today we omit hashes for URL requirements.

To power hash generation, we introduce a `HashPolicy` abstraction:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HashPolicy<'a> {
    /// No hash policy is specified.
    None,
    /// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
    Generate,
    /// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
    /// be generated so as to ensure that the archive is valid.
    Validate(&'a [HashDigest]),
}
```

All of the methods on the distribution database now accept this policy, instead of accepting `&'a [HashDigest]`.

Closes #2378.
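As a rough illustration of how a caller might select and inspect a policy (a minimal sketch: the `select_policy`/`describe` helpers and their inputs are hypothetical, only the `HashPolicy` API above comes from this PR):

```rust
use distribution_types::HashPolicy;
use pypi_types::HashDigest;

/// Choose a hash policy from CLI-style inputs (illustrative only).
fn select_policy<'a>(generate_hashes: bool, pinned: &'a [HashDigest]) -> HashPolicy<'a> {
    if !pinned.is_empty() {
        // Validate against user-provided digests.
        HashPolicy::Validate(pinned)
    } else if generate_hashes {
        // `--generate-hashes`: compute (but don't validate) a SHA-256 digest.
        HashPolicy::Generate
    } else {
        HashPolicy::None
    }
}

/// Inspect a policy via the helpers the new enum exposes.
fn describe(policy: HashPolicy) {
    println!(
        "generate: {}, validate: {}, algorithms: {}, pinned digests: {}",
        policy.is_generate(),
        policy.is_validate(),
        policy.algorithms().len(),
        policy.digests().len(),
    );
}
```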
Parent: 8513d603b4
Commit: 006379c50c

31 changed files with 546 additions and 495 deletions
```diff
@@ -8,7 +8,7 @@ use pypi_types::HashDigest;
 use uv_normalize::PackageName;
 
 use crate::direct_url::{DirectUrl, LocalFileUrl};
-use crate::hashed::Hashed;
+use crate::hash::Hashed;
 use crate::{
     BuiltDist, Dist, DistributionMetadata, InstalledMetadata, InstalledVersion, Name, SourceDist,
     VersionOrUrl,
```
`crates/distribution-types/src/hash.rs` (new file, 84 lines):
```diff
@@ -0,0 +1,84 @@
+use pypi_types::{HashAlgorithm, HashDigest};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum HashPolicy<'a> {
+    /// No hash policy is specified.
+    None,
+    /// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
+    Generate,
+    /// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
+    /// be generated so as to ensure that the archive is valid.
+    Validate(&'a [HashDigest]),
+}
+
+impl<'a> HashPolicy<'a> {
+    /// Returns `true` if the hash policy is `None`.
+    pub fn is_none(&self) -> bool {
+        matches!(self, Self::None)
+    }
+
+    /// Returns `true` if the hash policy is `Generate`.
+    pub fn is_generate(&self) -> bool {
+        matches!(self, Self::Generate)
+    }
+
+    /// Returns `true` if the hash policy is `Validate`.
+    pub fn is_validate(&self) -> bool {
+        matches!(self, Self::Validate(_))
+    }
+
+    /// Return the algorithms used in the hash policy.
+    pub fn algorithms(&self) -> Vec<HashAlgorithm> {
+        match self {
+            Self::None => vec![],
+            Self::Generate => vec![HashAlgorithm::Sha256],
+            Self::Validate(hashes) => {
+                let mut algorithms = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
+                algorithms.sort();
+                algorithms.dedup();
+                algorithms
+            }
+        }
+    }
+
+    /// Return the digests used in the hash policy.
+    pub fn digests(&self) -> &[HashDigest] {
+        match self {
+            Self::None => &[],
+            Self::Generate => &[],
+            Self::Validate(hashes) => hashes,
+        }
+    }
+}
+
+pub trait Hashed {
+    /// Return the [`HashDigest`]s for the archive.
+    fn hashes(&self) -> &[HashDigest];
+
+    /// Returns `true` if the archive satisfies the given hash policy.
+    fn satisfies(&self, hashes: HashPolicy) -> bool {
+        match hashes {
+            HashPolicy::None => true,
+            HashPolicy::Generate => self
+                .hashes()
+                .iter()
+                .any(|hash| hash.algorithm == HashAlgorithm::Sha256),
+            HashPolicy::Validate(hashes) => self.hashes().iter().any(|hash| hashes.contains(hash)),
+        }
+    }
+
+    /// Returns `true` if the archive includes a hash for at least one of the given algorithms.
+    fn has_digests(&self, hashes: HashPolicy) -> bool {
+        match hashes {
+            HashPolicy::None => true,
+            HashPolicy::Generate => self
+                .hashes()
+                .iter()
+                .any(|hash| hash.algorithm == HashAlgorithm::Sha256),
+            HashPolicy::Validate(hashes) => hashes
+                .iter()
+                .map(HashDigest::algorithm)
+                .any(|algorithm| self.hashes().iter().any(|hash| hash.algorithm == algorithm)),
+        }
+    }
+}
```
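A minimal sketch of how an archive type participates in this scheme, assuming only the `Hashed` trait and `HashPolicy` enum added above (the `CachedArchive` type is hypothetical):

```rust
use distribution_types::{HashPolicy, Hashed};
use pypi_types::HashDigest;

/// A hypothetical archive type that stores the digests computed while unpacking it.
struct CachedArchive {
    hashes: Vec<HashDigest>,
}

impl Hashed for CachedArchive {
    fn hashes(&self) -> &[HashDigest] {
        &self.hashes
    }
}

fn example(archive: &CachedArchive, pinned: &[HashDigest]) {
    // `None` imposes no requirement, so it is always satisfied.
    assert!(archive.satisfies(HashPolicy::None));

    // `Generate` is satisfied only if a SHA-256 digest is already recorded; an archive with no
    // stored digests does not satisfy it, which is what forces re-computation upstream.
    let empty = CachedArchive { hashes: Vec::new() };
    assert!(!empty.satisfies(HashPolicy::Generate));

    // `Validate` is satisfied if any stored digest matches one of the pinned digests.
    let _ok = archive.satisfies(HashPolicy::Validate(pinned));
}
```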
(deleted: the old `hashed` module that `hash.rs` replaces)

```diff
@@ -1,27 +0,0 @@
-use pypi_types::HashDigest;
-
-pub trait Hashed {
-    /// Return the [`HashDigest`]s for the archive.
-    fn hashes(&self) -> &[HashDigest];
-
-    /// Returns `true` if the archive satisfies the given hashes.
-    fn satisfies(&self, hashes: &[HashDigest]) -> bool {
-        if hashes.is_empty() {
-            true
-        } else {
-            self.hashes().iter().any(|hash| hashes.contains(hash))
-        }
-    }
-
-    /// Returns `true` if the archive includes a hash for at least one of the given algorithms.
-    fn has_digests(&self, hashes: &[HashDigest]) -> bool {
-        if hashes.is_empty() {
-            true
-        } else {
-            hashes
-                .iter()
-                .map(HashDigest::algorithm)
-                .any(|algorithm| self.hashes().iter().any(|hash| hash.algorithm == algorithm))
-        }
-    }
-}
```
```diff
@@ -51,7 +51,7 @@ pub use crate::direct_url::*;
 pub use crate::editable::*;
 pub use crate::error::*;
 pub use crate::file::*;
-pub use crate::hashed::*;
+pub use crate::hash::*;
 pub use crate::id::*;
 pub use crate::index_url::*;
 pub use crate::installed::*;
@@ -67,7 +67,7 @@ mod direct_url;
 mod editable;
 mod error;
 mod file;
-mod hashed;
+mod hash;
 mod id;
 mod index_url;
 mod installed;
```
```diff
@@ -18,7 +18,7 @@ use uv_dispatch::BuildDispatch;
 use uv_installer::SitePackages;
 use uv_interpreter::PythonEnvironment;
 use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver};
-use uv_types::{BuildIsolation, InFlight, RequiredHashes};
+use uv_types::{BuildIsolation, HashStrategy, InFlight};
 
 #[derive(ValueEnum, Default, Clone)]
 pub(crate) enum ResolveCliFormat {
@@ -58,7 +58,6 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
     let index_locations =
         IndexLocations::new(args.index_url, args.extra_index_url, args.find_links, false);
     let index = InMemoryIndex::default();
-    let hashes = RequiredHashes::default();
     let in_flight = InFlight::default();
     let no_build = if args.no_build {
         NoBuild::All
@@ -74,7 +73,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
        FlatIndex::from_entries(
            entries,
            venv.interpreter().tags()?,
-           &RequiredHashes::default(),
+           &HashStrategy::None,
            &no_build,
            &NoBinary::None,
        )
@@ -109,7 +108,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
        &client,
        &flat_index,
        &index,
-       &hashes,
+       &HashStrategy::None,
        &build_dispatch,
        &site_packages,
    )?;
```
```diff
@@ -21,7 +21,7 @@ use uv_configuration::{BuildKind, ConfigSettings, NoBinary, NoBuild, Reinstall,
 use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages};
 use uv_interpreter::{Interpreter, PythonEnvironment};
 use uv_resolver::{FlatIndex, InMemoryIndex, Manifest, Options, Resolver};
-use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes};
+use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
 
 /// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`]
 /// documentation.
@@ -134,7 +134,6 @@ impl<'a> BuildContext for BuildDispatch<'a> {
     async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result<Resolution> {
         let markers = self.interpreter.markers();
         let tags = self.interpreter.tags()?;
-        let hashes = RequiredHashes::default();
         let resolver = Resolver::new(
             Manifest::simple(requirements.to_vec()),
             self.options,
@@ -144,7 +143,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
             self.client,
             self.flat_index,
             self.index,
-            &hashes,
+            &HashStrategy::None,
             self,
             &EmptyInstalledPackages,
         )?;
@@ -178,9 +177,6 @@ impl<'a> BuildContext for BuildDispatch<'a> {
             venv.root().display(),
         );
 
-        // Don't enforce hashes for build dependencies.
-        let hashes = RequiredHashes::default();
-
         // Determine the current environment markers.
         let tags = self.interpreter.tags()?;
 
@@ -197,7 +193,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
             site_packages,
             &Reinstall::None,
             &NoBinary::None,
-            &RequiredHashes::default(),
+            &HashStrategy::None,
             self.index_locations,
             self.cache(),
             venv,
@@ -226,7 +222,8 @@ impl<'a> BuildContext for BuildDispatch<'a> {
             vec![]
         } else {
             // TODO(konstin): Check that there is no endless recursion.
-            let downloader = Downloader::new(self.cache, tags, &hashes, self.client, self);
+            let downloader =
+                Downloader::new(self.cache, tags, &HashStrategy::None, self.client, self);
             debug!(
                 "Downloading and building requirement{} for build: {}",
                 if remote.len() == 1 { "" } else { "s" },
```
```diff
@@ -11,8 +11,8 @@ use url::Url;
 
 use distribution_filename::WheelFilename;
 use distribution_types::{
-    BuildableSource, BuiltDist, Dist, FileLocation, Hashed, IndexLocations, LocalEditable, Name,
-    SourceDist,
+    BuildableSource, BuiltDist, Dist, FileLocation, HashPolicy, Hashed, IndexLocations,
+    LocalEditable, Name, SourceDist,
 };
 use platform_tags::Tags;
 use pypi_types::{HashDigest, Metadata23};
@@ -91,7 +91,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         &self,
         dist: &Dist,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<LocalWheel, Error> {
         match dist {
             Dist::Built(built) => self.get_wheel(built, hashes).await,
@@ -108,7 +108,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
     pub async fn get_or_build_wheel_metadata(
         &self,
         dist: &Dist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         match dist {
             Dist::Built(built) => self.get_wheel_metadata(built, hashes).await,
@@ -147,12 +147,12 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
 
     /// Fetch a wheel from the cache or download it from the index.
     ///
-    /// While hashes will be generated in some cases, hash-checking is _not_ enforced and should
+    /// While hashes will be generated in all cases, hash-checking is _not_ enforced and should
     /// instead be enforced by the caller.
     async fn get_wheel(
         &self,
         dist: &BuiltDist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<LocalWheel, Error> {
         let no_binary = match self.build_context.no_binary() {
             NoBinary::None => false,
@@ -298,7 +298,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         &self,
         dist: &SourceDist,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<LocalWheel, Error> {
         let lock = self.locks.acquire(&Dist::Source(dist.clone())).await;
         let _guard = lock.lock().await;
@@ -342,8 +342,21 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
     pub async fn get_wheel_metadata(
         &self,
         dist: &BuiltDist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
+        // If hash generation is enabled, and the distribution isn't hosted on an index, get the
+        // entire wheel to ensure that the hashes are included in the response. If the distribution
+        // is hosted on an index, the hashes will be included in the simple metadata response.
+        // For hash _validation_, callers are expected to enforce the policy when retrieving the
+        // wheel.
+        // TODO(charlie): Request the hashes via a separate method, to reduce the coupling in this API.
+        if hashes.is_generate() && matches!(dist, BuiltDist::DirectUrl(_) | BuiltDist::Path(_)) {
+            let wheel = self.get_wheel(dist, hashes).await?;
+            let metadata = wheel.metadata()?;
+            let hashes = wheel.hashes;
+            return Ok(ArchiveMetadata { metadata, hashes });
+        }
+
         match self.client.wheel_metadata(dist).boxed().await {
             Ok(metadata) => Ok(ArchiveMetadata::from(metadata)),
             Err(err) if err.is_http_streaming_unsupported() => {
```
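The caller side of the metadata path above is small: for registry-hosted wheels the digests arrive with the simple-index response, while direct-URL and path wheels are downloaded and hashed. A minimal sketch, assuming only the public methods shown in this diff (the wrapper function itself is hypothetical):

```rust
use distribution_types::{BuiltDist, HashPolicy};
use uv_distribution::{ArchiveMetadata, DistributionDatabase, Error};
use uv_types::BuildContext;

/// Fetch metadata for a wheel, generating a SHA-256 digest when the wheel is not hosted on a
/// registry (illustrative only).
async fn metadata_with_hashes<'a, Context: BuildContext + Send + Sync>(
    database: &DistributionDatabase<'a, Context>,
    dist: &BuiltDist,
) -> Result<ArchiveMetadata, Error> {
    // `Generate` asks the database to compute digests when they are not already available.
    database.get_wheel_metadata(dist, HashPolicy::Generate).await
}
```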
```diff
@@ -367,7 +380,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
     pub async fn build_wheel_metadata(
         &self,
         source: &BuildableSource<'_>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         let no_build = match self.build_context.no_build() {
             NoBuild::All => true,
@@ -400,7 +413,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         filename: &WheelFilename,
         wheel_entry: &CacheEntry,
         dist: &BuiltDist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Archive, Error> {
         // Create an entry for the HTTP cache.
         let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem()));
@@ -413,12 +426,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
             .into_async_read();
 
         // Create a hasher for each hash algorithm.
-        let algorithms = {
-            let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
-            hash.sort();
-            hash.dedup();
-            hash
-        };
+        let algorithms = hashes.algorithms();
         let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
         let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers);
 
@@ -428,7 +436,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
 
         // If necessary, exhaust the reader to compute the hash.
-        if !hashes.is_empty() {
+        if !hashes.is_none() {
             hasher.finish().await.map_err(Error::HashExhaustion)?;
         }
 
```
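The pattern in the hunks above recurs throughout this PR: wrap the source reader in a `HashReader` so the requested digests are computed while the archive is unzipped, then exhaust the reader so trailing bytes are hashed too. A sketch of that flow, assuming the `uv_extract` call shapes as they appear in the diff (the reader bound, the `Hasher` import path, and the error handling are assumptions):

```rust
use distribution_types::HashPolicy;
use uv_extract::hash::HashReader;

/// Unzip an archive from an async stream while computing the digests implied by the policy
/// (illustrative only; converting the finished hashers into `HashDigest`s is elided).
async fn unzip_with_hashes<R>(
    reader: R,
    target: &std::path::Path,
    policy: HashPolicy<'_>,
) -> anyhow::Result<()>
where
    R: tokio::io::AsyncRead + Unpin,
{
    // One incremental hasher per algorithm implied by the policy (none for `HashPolicy::None`).
    let mut hashers = policy
        .algorithms()
        .into_iter()
        .map(uv_extract::hash::Hasher::from)
        .collect::<Vec<_>>();
    let mut hasher = HashReader::new(reader, &mut hashers);

    // Unzipping drives the reader, so every extracted byte passes through the hashers.
    uv_extract::stream::unzip(&mut hasher, target).await?;

    // If any hashes were requested, exhaust the reader to cover bytes the unzip didn't consume.
    if !policy.is_none() {
        hasher.finish().await?;
    }

    Ok(())
}
```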
```diff
@@ -492,7 +500,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         filename: &WheelFilename,
         wheel_entry: &CacheEntry,
         dist: &BuiltDist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Archive, Error> {
         // Create an entry for the HTTP cache.
         let http_entry = wheel_entry.with_file(format!("{}.http", filename.stem()));
@@ -521,7 +529,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
             .map_err(Error::CacheWrite)?;
 
         // If no hashes are required, parallelize the unzip operation.
-        let hashes = if hashes.is_empty() {
+        let hashes = if hashes.is_none() {
             let file = file.into_std().await;
             tokio::task::spawn_blocking({
                 let target = temp_dir.path().to_owned();
@@ -536,12 +544,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
             vec![]
         } else {
             // Create a hasher for each hash algorithm.
-            let algorithms = {
-                let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
-                hash.sort();
-                hash.dedup();
-                hash
-            };
+            let algorithms = hashes.algorithms();
             let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
             let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
             uv_extract::stream::unzip(&mut hasher, temp_dir.path()).await?;
@@ -609,7 +612,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         filename: &WheelFilename,
         wheel_entry: CacheEntry,
         dist: &BuiltDist,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<LocalWheel, Error> {
         // Determine the last-modified time of the wheel.
         let modified = ArchiveTimestamp::from_file(path).map_err(Error::CacheRead)?;
@@ -626,7 +629,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
                 hashes: archive.hashes,
                 filename: filename.clone(),
             })
-        } else if hashes.is_empty() {
+        } else if hashes.is_none() {
             // Otherwise, unzip the wheel.
             let archive = Archive::new(self.unzip_wheel(path, wheel_entry.path()).await?, vec![]);
             write_timestamped_archive(&archive_entry, archive.clone(), modified).await?;
@@ -646,12 +649,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
                 .map_err(Error::CacheWrite)?;
 
             // Create a hasher for each hash algorithm.
-            let algorithms = {
-                let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
-                hash.sort();
-                hash.dedup();
-                hash
-            };
+            let algorithms = hashes.algorithms();
             let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
             let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
 
```
```diff
@@ -4,7 +4,7 @@ use distribution_types::{
 use platform_tags::Tags;
 use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheShard, WheelCache};
 use uv_fs::symlinks;
-use uv_types::RequiredHashes;
+use uv_types::HashStrategy;
 
 use crate::index::cached_wheel::CachedWheel;
 use crate::source::{read_http_revision, read_timestamped_revision, REVISION};
@@ -15,16 +15,16 @@ use crate::Error;
 pub struct BuiltWheelIndex<'a> {
     cache: &'a Cache,
     tags: &'a Tags,
-    hashes: &'a RequiredHashes,
+    hasher: &'a HashStrategy,
 }
 
 impl<'a> BuiltWheelIndex<'a> {
     /// Initialize an index of built distributions.
-    pub fn new(cache: &'a Cache, tags: &'a Tags, hashes: &'a RequiredHashes) -> Self {
+    pub fn new(cache: &'a Cache, tags: &'a Tags, hasher: &'a HashStrategy) -> Self {
         Self {
             cache,
             tags,
-            hashes,
+            hasher,
         }
     }
 
@@ -46,10 +46,8 @@ impl<'a> BuiltWheelIndex<'a> {
         };
 
         // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
-        if let Some(hashes) = self.hashes.get(&source_dist.name) {
-            if !revision.satisfies(hashes) {
-                return Ok(None);
-            }
+        if !revision.satisfies(self.hasher.get(&source_dist.name)) {
+            return Ok(None);
         }
 
         Ok(self.find(&cache_shard.shard(revision.id())))
@@ -76,10 +74,8 @@ impl<'a> BuiltWheelIndex<'a> {
         };
 
         // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
-        if let Some(hashes) = self.hashes.get(&source_dist.name) {
-            if !revision.satisfies(hashes) {
-                return Ok(None);
-            }
+        if !revision.satisfies(self.hasher.get(&source_dist.name)) {
+            return Ok(None);
         }
 
         Ok(self.find(&cache_shard.shard(revision.id())))
@@ -88,7 +84,7 @@ impl<'a> BuiltWheelIndex<'a> {
     /// Return the most compatible [`CachedWheel`] for a given source distribution at a git URL.
     pub fn git(&self, source_dist: &GitSourceDist) -> Option<CachedWheel> {
        // Enforce hash-checking, which isn't supported for Git distributions.
-       if self.hashes.get(&source_dist.name).is_some() {
+       if self.hasher.get(&source_dist.name).is_validate() {
            return None;
        }
 
```
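Across the cache indexes, the new `HashStrategy::get` call replaces the old `Option`-returning `RequiredHashes::get`: it always yields a `HashPolicy` for a package, so the reuse check collapses to a single `satisfies` call. A hedged sketch of that gate (the per-package mapping of `HashStrategy::get` is inferred from how it is used in this PR, not from its definition, which is not part of this diff):

```rust
use distribution_types::Hashed;
use uv_normalize::PackageName;
use uv_types::HashStrategy;

/// Decide whether a cached archive may be reused under the configured hash strategy
/// (illustrative only).
fn reusable<T: Hashed>(hasher: &HashStrategy, package: &PackageName, archive: &T) -> bool {
    // `get` collapses the strategy to a per-package `HashPolicy`:
    // - `HashStrategy::None`      -> `HashPolicy::None` (anything is acceptable),
    // - `HashStrategy::Generate`  -> `HashPolicy::Generate` (a SHA-256 must already be recorded),
    // - `HashStrategy::Validate`  -> the digests pinned for this package.
    archive.satisfies(hasher.get(package))
}
```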
```diff
@@ -10,7 +10,7 @@ use platform_tags::Tags;
 use uv_cache::{Cache, CacheBucket, WheelCache};
 use uv_fs::{directories, files, symlinks};
 use uv_normalize::PackageName;
-use uv_types::RequiredHashes;
+use uv_types::HashStrategy;
 
 use crate::index::cached_wheel::CachedWheel;
 use crate::source::{read_http_revision, REVISION};
@@ -21,7 +21,7 @@ pub struct RegistryWheelIndex<'a> {
     cache: &'a Cache,
     tags: &'a Tags,
     index_locations: &'a IndexLocations,
-    hashes: &'a RequiredHashes,
+    hasher: &'a HashStrategy,
     index: FxHashMap<&'a PackageName, BTreeMap<Version, CachedRegistryDist>>,
 }
 
@@ -31,13 +31,13 @@ impl<'a> RegistryWheelIndex<'a> {
         cache: &'a Cache,
         tags: &'a Tags,
         index_locations: &'a IndexLocations,
-        hashes: &'a RequiredHashes,
+        hasher: &'a HashStrategy,
     ) -> Self {
         Self {
             cache,
             tags,
             index_locations,
-            hashes,
+            hasher,
             index: FxHashMap::default(),
         }
     }
@@ -72,7 +72,7 @@ impl<'a> RegistryWheelIndex<'a> {
                 self.cache,
                 self.tags,
                 self.index_locations,
-                self.hashes,
+                self.hasher,
             )),
         };
         versions
@@ -84,10 +84,9 @@ impl<'a> RegistryWheelIndex<'a> {
         cache: &Cache,
         tags: &Tags,
         index_locations: &IndexLocations,
-        hashes: &RequiredHashes,
+        hasher: &HashStrategy,
     ) -> BTreeMap<Version, CachedRegistryDist> {
         let mut versions = BTreeMap::new();
-        let hashes = hashes.get(package).unwrap_or_default();
 
         // Collect into owned `IndexUrl`
         let flat_index_urls: Vec<IndexUrl> = index_locations
@@ -119,7 +118,7 @@ impl<'a> RegistryWheelIndex<'a> {
             {
                 if let Some(wheel) = CachedWheel::from_http_pointer(&wheel_dir.join(&file)) {
                     // Enforce hash-checking based on the built distribution.
-                    if wheel.satisfies(hashes) {
+                    if wheel.satisfies(hasher.get(package)) {
                         Self::add_wheel(wheel, tags, &mut versions);
                     }
                 }
@@ -132,7 +131,7 @@ impl<'a> RegistryWheelIndex<'a> {
                 if let Some(wheel) = CachedWheel::from_revision_pointer(&wheel_dir.join(&file))
                 {
                     // Enforce hash-checking based on the built distribution.
-                    if wheel.satisfies(hashes) {
+                    if wheel.satisfies(hasher.get(package)) {
                         Self::add_wheel(wheel, tags, &mut versions);
                     }
                 }
@@ -153,7 +152,7 @@ impl<'a> RegistryWheelIndex<'a> {
             let revision_entry = cache_shard.entry(REVISION);
             if let Ok(Some(revision)) = read_http_revision(&revision_entry) {
                 // Enforce hash-checking based on the source distribution.
-                if revision.satisfies(hashes) {
+                if revision.satisfies(hasher.get(package)) {
                     for wheel_dir in symlinks(cache_shard.join(revision.id())) {
                         if let Some(wheel) = CachedWheel::from_built_source(&wheel_dir) {
                             Self::add_wheel(wheel, tags, &mut versions);
```
```diff
@@ -16,8 +16,8 @@ use zip::ZipArchive;
 
 use distribution_filename::WheelFilename;
 use distribution_types::{
-    BuildableSource, DirectArchiveUrl, Dist, FileLocation, GitSourceUrl, Hashed, LocalEditable,
-    PathSourceDist, PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
+    BuildableSource, DirectArchiveUrl, Dist, FileLocation, GitSourceUrl, HashPolicy, Hashed,
+    LocalEditable, PathSourceDist, PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
 };
 use install_wheel_rs::metadata::read_archive_metadata;
 use platform_tags::Tags;
@@ -79,7 +79,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'_>,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<BuiltWheelMetadata, Error> {
         let built_wheel_metadata = match &source {
             BuildableSource::Dist(SourceDist::Registry(dist)) => {
@@ -214,7 +214,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
     pub(super) async fn download_and_build_metadata(
         &self,
         source: &BuildableSource<'_>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         let metadata = match &source {
             BuildableSource::Dist(SourceDist::Registry(dist)) => {
@@ -350,7 +350,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<BuiltWheelMetadata, Error> {
         // Fetch the revision for the source distribution.
         let revision = self
@@ -361,7 +361,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         if !revision.satisfies(hashes) {
             return Err(Error::hash_mismatch(
                 source.to_string(),
-                hashes,
+                hashes.digests(),
                 revision.hashes(),
             ));
         }
@@ -418,7 +418,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         url: &'data Url,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         // Fetch the revision for the source distribution.
         let revision = self
@@ -429,7 +429,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         if !revision.satisfies(hashes) {
             return Err(Error::hash_mismatch(
                 source.to_string(),
-                hashes,
+                hashes.digests(),
                 revision.hashes(),
             ));
         }
@@ -507,7 +507,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         filename: &str,
         url: &Url,
         cache_shard: &CacheShard,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Revision, Error> {
         let cache_entry = cache_shard.entry(REVISION);
         let cache_control = match self.client.connectivity() {
@@ -570,7 +570,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         resource: &PathSourceUrl<'_>,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<BuiltWheelMetadata, Error> {
         let cache_shard = self.build_context.cache().shard(
             CacheBucket::BuiltWheels,
@@ -586,7 +586,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         if !revision.satisfies(hashes) {
             return Err(Error::hash_mismatch(
                 source.to_string(),
-                hashes,
+                hashes.digests(),
                 revision.hashes(),
             ));
         }
@@ -640,7 +640,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'_>,
         resource: &PathSourceUrl<'_>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         let cache_shard = self.build_context.cache().shard(
             CacheBucket::BuiltWheels,
@@ -656,7 +656,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         if !revision.satisfies(hashes) {
             return Err(Error::hash_mismatch(
                 source.to_string(),
-                hashes,
+                hashes.digests(),
                 revision.hashes(),
             ));
         }
@@ -732,7 +732,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         resource: &PathSourceUrl<'_>,
         cache_shard: &CacheShard,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Revision, Error> {
         // Determine the last-modified time of the source distribution.
         let modified = ArchiveTimestamp::from_file(&resource.path).map_err(Error::CacheRead)?;
@@ -779,10 +779,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         resource: &PathSourceUrl<'_>,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<BuiltWheelMetadata, Error> {
         // Before running the build, check that the hashes match.
-        if !hashes.is_empty() {
+        if hashes.is_validate() {
             return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
         }
 
@@ -843,10 +843,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'_>,
         resource: &PathSourceUrl<'_>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         // Before running the build, check that the hashes match.
-        if !hashes.is_empty() {
+        if hashes.is_validate() {
             return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
         }
 
@@ -945,10 +945,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         resource: &GitSourceUrl<'_>,
         tags: &Tags,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<BuiltWheelMetadata, Error> {
         // Before running the build, check that the hashes match.
-        if !hashes.is_empty() {
+        if hashes.is_validate() {
             return Err(Error::HashesNotSupportedGit(source.to_string()));
         }
 
@@ -1017,10 +1017,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'_>,
         resource: &GitSourceUrl<'_>,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<ArchiveMetadata, Error> {
         // Before running the build, check that the hashes match.
-        if !hashes.is_empty() {
+        if hashes.is_validate() {
             return Err(Error::HashesNotSupportedGit(source.to_string()));
         }
 
@@ -1111,7 +1111,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         filename: &str,
         target: &Path,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Vec<HashDigest>, Error> {
         let temp_dir =
             tempfile::tempdir_in(self.build_context.cache().bucket(CacheBucket::BuiltWheels))
@@ -1122,12 +1122,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .into_async_read();
 
         // Create a hasher for each hash algorithm.
-        let algorithms = {
-            let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
-            hash.sort();
-            hash.dedup();
-            hash
-        };
+        let algorithms = hashes.algorithms();
         let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
         let mut hasher = uv_extract::hash::HashReader::new(reader.compat(), &mut hashers);
 
@@ -1137,7 +1132,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         drop(span);
 
         // If necessary, exhaust the reader to compute the hash.
-        if !hashes.is_empty() {
+        if !hashes.is_none() {
             hasher.finish().await.map_err(Error::HashExhaustion)?;
         }
 
@@ -1166,7 +1161,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         path: &Path,
         target: &Path,
-        hashes: &[HashDigest],
+        hashes: HashPolicy<'_>,
     ) -> Result<Vec<HashDigest>, Error> {
         debug!("Unpacking for build: {}", path.display());
 
@@ -1178,12 +1173,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .map_err(Error::CacheRead)?;
 
         // Create a hasher for each hash algorithm.
-        let algorithms = {
-            let mut hash = hashes.iter().map(HashDigest::algorithm).collect::<Vec<_>>();
-            hash.sort();
-            hash.dedup();
-            hash
-        };
+        let algorithms = hashes.algorithms();
         let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
         let mut hasher = uv_extract::hash::HashReader::new(reader, &mut hashers);
 
@@ -1191,7 +1181,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         uv_extract::stream::archive(&mut hasher, path, &temp_dir.path()).await?;
 
         // If necessary, exhaust the reader to compute the hash.
-        if !hashes.is_empty() {
+        if !hashes.is_none() {
             hasher.finish().await.map_err(Error::HashExhaustion)?;
         }
 
```
```diff
@@ -15,7 +15,7 @@ use platform_tags::Tags;
 use uv_cache::Cache;
 use uv_client::RegistryClient;
 use uv_distribution::{DistributionDatabase, LocalWheel};
-use uv_types::{BuildContext, InFlight, RequiredHashes};
+use uv_types::{BuildContext, HashStrategy, InFlight};
 
 use crate::editable::BuiltEditable;
 
@@ -40,7 +40,7 @@ pub enum Error {
 pub struct Downloader<'a, Context: BuildContext + Send + Sync> {
     tags: &'a Tags,
     cache: &'a Cache,
-    hashes: &'a RequiredHashes,
+    hashes: &'a HashStrategy,
     database: DistributionDatabase<'a, Context>,
     reporter: Option<Arc<dyn Reporter>>,
 }
@@ -49,7 +49,7 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
     pub fn new(
         cache: &'a Cache,
         tags: &'a Tags,
-        hashes: &'a RequiredHashes,
+        hashes: &'a HashStrategy,
         client: &'a RegistryClient,
         build_context: &'a Context,
     ) -> Self {
@@ -170,22 +170,22 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
     pub async fn get_wheel(&self, dist: Dist, in_flight: &InFlight) -> Result<CachedDist, Error> {
         let id = dist.distribution_id();
         if in_flight.downloads.register(id.clone()) {
-            let hashes = self.hashes.get(dist.name()).unwrap_or_default();
+            let policy = self.hashes.get(dist.name());
             let result = self
                 .database
-                .get_or_build_wheel(&dist, self.tags, hashes)
+                .get_or_build_wheel(&dist, self.tags, policy)
                 .boxed()
                 .map_err(|err| Error::Fetch(dist.clone(), err))
                 .await
                 .and_then(|wheel: LocalWheel| {
-                    if wheel.satisfies(hashes) {
+                    if wheel.satisfies(policy) {
                         Ok(wheel)
                     } else {
                         Err(Error::Fetch(
                             dist.clone(),
                             uv_distribution::Error::hash_mismatch(
                                 dist.to_string(),
-                                hashes,
+                                policy.digests(),
                                 wheel.hashes(),
                             ),
                         ))
```
```diff
@@ -19,7 +19,7 @@ use uv_configuration::{NoBinary, Reinstall};
 use uv_distribution::{read_timestamped_archive, Archive, BuiltWheelIndex, RegistryWheelIndex};
 use uv_fs::Simplified;
 use uv_interpreter::PythonEnvironment;
-use uv_types::RequiredHashes;
+use uv_types::HashStrategy;
 
 use crate::{ResolvedEditable, SitePackages};
 
@@ -66,15 +66,15 @@ impl<'a> Planner<'a> {
         mut site_packages: SitePackages<'_>,
         reinstall: &Reinstall,
         no_binary: &NoBinary,
-        hashes: &RequiredHashes,
+        hasher: &HashStrategy,
         index_locations: &IndexLocations,
         cache: &Cache,
         venv: &PythonEnvironment,
         tags: &Tags,
     ) -> Result<Plan> {
         // Index all the already-downloaded wheels in the cache.
-        let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations, hashes);
-        let built_index = BuiltWheelIndex::new(cache, tags, hashes);
+        let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations, hasher);
+        let built_index = BuiltWheelIndex::new(cache, tags, hasher);
 
         let mut cached = vec![];
         let mut remote = vec![];
@@ -262,8 +262,7 @@ impl<'a> Planner<'a> {
                     let archive = rmp_serde::from_slice::<Archive>(&data)?;
 
                     // Enforce hash checking.
-                    let hashes = hashes.get(&requirement.name).unwrap_or_default();
-                    if archive.satisfies(hashes) {
+                    if archive.satisfies(hasher.get(&requirement.name)) {
                         let cached_dist = CachedDirectUrlDist::from_url(
                             wheel.filename,
                             wheel.url,
@@ -312,8 +311,7 @@ impl<'a> Planner<'a> {
                         &cache_entry,
                         ArchiveTimestamp::from_file(&wheel.path)?,
                     )? {
-                        let hashes = hashes.get(&requirement.name).unwrap_or_default();
-                        if archive.satisfies(hashes) {
+                        if archive.satisfies(hasher.get(&requirement.name)) {
                             let cached_dist = CachedDirectUrlDist::from_url(
                                 wheel.filename,
                                 wheel.url,
```
```diff
@@ -12,7 +12,7 @@ use uv_client::RegistryClient;
 use uv_configuration::{Constraints, Overrides};
 use uv_distribution::{DistributionDatabase, Reporter};
 use uv_resolver::{InMemoryIndex, MetadataResponse};
-use uv_types::{BuildContext, RequestedRequirements, RequiredHashes};
+use uv_types::{BuildContext, HashStrategy, RequestedRequirements};
 
 /// A resolver for resolving lookahead requirements from direct URLs.
 ///
@@ -40,7 +40,7 @@ pub struct LookaheadResolver<'a, Context: BuildContext + Send + Sync> {
     /// The editable requirements for the project.
     editables: &'a [(LocalEditable, Metadata23)],
     /// The required hashes for the project.
-    hashes: &'a RequiredHashes,
+    hasher: &'a HashStrategy,
     /// The in-memory index for resolving dependencies.
     index: &'a InMemoryIndex,
     /// The database for fetching and building distributions.
@@ -55,7 +55,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
         constraints: &'a Constraints,
         overrides: &'a Overrides,
         editables: &'a [(LocalEditable, Metadata23)],
-        hashes: &'a RequiredHashes,
+        hasher: &'a HashStrategy,
         context: &'a Context,
         client: &'a RegistryClient,
         index: &'a InMemoryIndex,
@@ -65,7 +65,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
             constraints,
             overrides,
             editables,
-            hashes,
+            hasher,
             index,
             database: DistributionDatabase::new(client, context),
         }
@@ -155,10 +155,9 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
             archive.metadata.requires_dist.clone()
         } else {
             // Run the PEP 517 build process to extract metadata from the source distribution.
-            let hashes = self.hashes.get(dist.name()).unwrap_or_default();
             let archive = self
                 .database
-                .get_or_build_wheel_metadata(&dist, hashes)
+                .get_or_build_wheel_metadata(&dist, self.hasher.get(dist.name()))
                 .await
                 .with_context(|| match &dist {
                     Dist::Built(built) => format!("Failed to download: {built}"),
```
```diff
@@ -5,13 +5,13 @@ use anyhow::{Context, Result};
 use futures::{StreamExt, TryStreamExt};
 use url::Url;
 
-use distribution_types::{BuildableSource, PackageId, PathSourceUrl, SourceUrl};
+use distribution_types::{BuildableSource, HashPolicy, PackageId, PathSourceUrl, SourceUrl};
 use pep508_rs::Requirement;
 use uv_client::RegistryClient;
 use uv_distribution::{DistributionDatabase, Reporter};
 use uv_fs::Simplified;
 use uv_resolver::{InMemoryIndex, MetadataResponse};
-use uv_types::BuildContext;
+use uv_types::{BuildContext, HashStrategy};
 
 use crate::ExtrasSpecification;
 
@@ -24,8 +24,8 @@ pub struct SourceTreeResolver<'a, Context: BuildContext + Send + Sync> {
     source_trees: Vec<PathBuf>,
     /// The extras to include when resolving requirements.
     extras: &'a ExtrasSpecification<'a>,
-    /// Whether to require hashes for all dependencies.
-    require_hashes: bool,
+    /// The hash policy to enforce.
+    hasher: &'a HashStrategy,
     /// The in-memory index for resolving dependencies.
     index: &'a InMemoryIndex,
     /// The database for fetching and building distributions.
@@ -37,7 +37,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
     pub fn new(
         source_trees: Vec<PathBuf>,
         extras: &'a ExtrasSpecification<'a>,
-        require_hashes: bool,
+        hasher: &'a HashStrategy,
         context: &'a Context,
         client: &'a RegistryClient,
         index: &'a InMemoryIndex,
@@ -45,7 +45,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
         Self {
             source_trees,
             extras,
-            require_hashes,
+            hasher,
             index,
             database: DistributionDatabase::new(client, context),
         }
@@ -87,15 +87,19 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
             path: Cow::Owned(path),
         });
 
-        // TODO(charlie): Should we enforce this earlier? If the metadata can be extracted
-        // statically, it won't go through this resolver. But we'll fail anyway, since the
-        // dependencies (when extracted from a `pyproject.toml` or `setup.py`) won't include hashes.
-        if self.require_hashes {
-            return Err(anyhow::anyhow!(
-                "Hash-checking is not supported for local directories: {}",
-                source_tree.user_display()
-            ));
-        }
+        // Determine the hash policy. Since we don't have a package name, we perform a
+        // manual match.
+        let hashes = match self.hasher {
+            HashStrategy::None => HashPolicy::None,
+            HashStrategy::Generate => HashPolicy::Generate,
+            HashStrategy::Validate(_) => {
+                // TODO(charlie): Support `--require-hashes` for unnamed requirements.
+                return Err(anyhow::anyhow!(
+                    "Hash-checking is not supported for local directories: {}",
+                    source_tree.user_display()
+                ));
+            }
+        };
 
         // Fetch the metadata for the distribution.
         let metadata = {
@@ -117,7 +121,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
         } else {
             // Run the PEP 517 build process to extract metadata from the source distribution.
             let source = BuildableSource::Url(source);
-            let archive = self.database.build_wheel_metadata(&source, &[]).await?;
+            let archive = self.database.build_wheel_metadata(&source, hashes).await?;
 
             // Insert the metadata into the index.
             self.index
```
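The "manual match" above (repeated in the named-requirements resolver below) exists because these requirements have no package name yet, so per-package digests cannot be looked up. A hedged sketch of that mapping as a standalone helper (the helper itself is hypothetical; the variants and error message mirror the diff):

```rust
use distribution_types::HashPolicy;
use uv_types::HashStrategy;

/// Collapse a `HashStrategy` to a `HashPolicy` for a requirement that has no package name yet.
fn policy_for_unnamed(hasher: &HashStrategy) -> Result<HashPolicy<'_>, anyhow::Error> {
    match hasher {
        // No hash checking: nothing to enforce.
        HashStrategy::None => Ok(HashPolicy::None),
        // `--generate-hashes`: compute digests, but don't validate them.
        HashStrategy::Generate => Ok(HashPolicy::Generate),
        // Validation needs a package name to look up the pinned digests, so unnamed
        // requirements are rejected for now.
        HashStrategy::Validate(_) => Err(anyhow::anyhow!(
            "Hash-checking is not supported for unnamed requirements"
        )),
    }
}
```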
```diff
@@ -10,8 +10,8 @@ use tracing::debug;
 
 use distribution_filename::{SourceDistFilename, WheelFilename};
 use distribution_types::{
-    BuildableSource, DirectSourceUrl, GitSourceUrl, PackageId, PathSourceUrl, RemoteSource,
-    SourceUrl,
+    BuildableSource, DirectSourceUrl, GitSourceUrl, HashPolicy, PackageId, PathSourceUrl,
+    RemoteSource, SourceUrl,
 };
 use pep508_rs::{
     Requirement, RequirementsTxtRequirement, Scheme, UnnamedRequirement, VersionOrUrl,
@@ -21,14 +21,14 @@ use uv_client::RegistryClient;
 use uv_distribution::{DistributionDatabase, Reporter};
 use uv_normalize::PackageName;
 use uv_resolver::{InMemoryIndex, MetadataResponse};
-use uv_types::BuildContext;
+use uv_types::{BuildContext, HashStrategy};
 
 /// Like [`RequirementsSpecification`], but with concrete names for all requirements.
 pub struct NamedRequirementsResolver<'a, Context: BuildContext + Send + Sync> {
     /// The requirements for the project.
     requirements: Vec<RequirementsTxtRequirement>,
-    /// Whether to check hashes for distributions.
-    require_hashes: bool,
+    hasher: &'a HashStrategy,
     /// The in-memory index for resolving dependencies.
     index: &'a InMemoryIndex,
     /// The database for fetching and building distributions.
@@ -39,14 +39,14 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
     /// Instantiate a new [`NamedRequirementsResolver`] for a given set of requirements.
     pub fn new(
         requirements: Vec<RequirementsTxtRequirement>,
-        require_hashes: bool,
+        hasher: &'a HashStrategy,
         context: &'a Context,
         client: &'a RegistryClient,
         index: &'a InMemoryIndex,
     ) -> Self {
         Self {
             requirements,
-            require_hashes,
+            hasher,
             index,
             database: DistributionDatabase::new(client, context),
         }
@@ -65,7 +65,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
     pub async fn resolve(self) -> Result<Vec<Requirement>> {
         let Self {
             requirements,
-            require_hashes,
+            hasher,
             index,
             database,
         } = self;
@@ -74,8 +74,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
             match requirement {
                 RequirementsTxtRequirement::Pep508(requirement) => Ok(requirement),
                 RequirementsTxtRequirement::Unnamed(requirement) => {
-                    Self::resolve_requirement(requirement, require_hashes, index, &database)
-                        .await
+                    Self::resolve_requirement(requirement, hasher, index, &database).await
                 }
             }
         })
@@ -87,7 +86,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
     /// Infer the package name for a given "unnamed" requirement.
     async fn resolve_requirement(
         requirement: UnnamedRequirement,
-        require_hashes: bool,
+        hasher: &HashStrategy,
         index: &InMemoryIndex,
         database: &DistributionDatabase<'a, Context>,
     ) -> Result<Requirement> {
@@ -240,13 +239,6 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
             }
         };
 
-        // TODO(charlie): Support `--require-hashes` for unnamed requirements.
-        if require_hashes {
-            return Err(anyhow::anyhow!(
-                "Unnamed requirements are not supported with `--require-hashes`"
-            ));
-        }
-
         // Fetch the metadata for the distribution.
         let name = {
             let id = PackageId::from_url(source.url());
@@ -260,9 +252,22 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
             // If the metadata is already in the index, return it.
             archive.metadata.name.clone()
         } else {
+            // Determine the hash policy. Since we don't have a package name, we perform a
+            // manual match.
+            let hashes = match hasher {
+                HashStrategy::None => HashPolicy::None,
+                HashStrategy::Generate => HashPolicy::Generate,
+                HashStrategy::Validate(_) => {
+                    // TODO(charlie): Support `--require-hashes` for unnamed requirements.
+                    return Err(anyhow::anyhow!(
+                        "Unnamed requirements are not supported with `--require-hashes`"
+                    ));
+                }
+            };
+
             // Run the PEP 517 build process to extract metadata from the source distribution.
             let source = BuildableSource::Url(source);
-            let archive = database.build_wheel_metadata(&source, &[]).await?;
+            let archive = database.build_wheel_metadata(&source, hashes).await?;
 
             let name = archive.metadata.name.clone();
 
```
```diff
@@ -6,8 +6,9 @@ use tracing::instrument;
 
 use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
 use distribution_types::{
-    BuiltDist, Dist, File, Hash, IncompatibleSource, IncompatibleWheel, IndexUrl, PrioritizedDist,
-    RegistryBuiltDist, RegistrySourceDist, SourceDist, SourceDistCompatibility, WheelCompatibility,
+    BuiltDist, Dist, File, Hash, HashPolicy, IncompatibleSource, IncompatibleWheel, IndexUrl,
+    PrioritizedDist, RegistryBuiltDist, RegistrySourceDist, SourceDist, SourceDistCompatibility,
+    WheelCompatibility,
 };
 use pep440_rs::Version;
 use platform_tags::{TagCompatibility, Tags};
@@ -15,7 +16,7 @@ use pypi_types::HashDigest;
 use uv_client::FlatIndexEntries;
 use uv_configuration::{NoBinary, NoBuild};
 use uv_normalize::PackageName;
-use uv_types::RequiredHashes;
+use uv_types::HashStrategy;
 
 /// A set of [`PrioritizedDist`] from a `--find-links` entry, indexed by [`PackageName`]
 /// and [`Version`].
@@ -34,7 +35,7 @@ impl FlatIndex {
     pub fn from_entries(
         entries: FlatIndexEntries,
         tags: &Tags,
-        required_hashes: &RequiredHashes,
+        hasher: &HashStrategy,
         no_build: &NoBuild,
         no_binary: &NoBinary,
     ) -> Self {
@@ -47,7 +48,7 @@ impl FlatIndex {
                 file,
                 filename,
                 tags,
-                required_hashes,
+                hasher,
                 no_build,
                 no_binary,
                 url,
@@ -66,7 +67,7 @@ impl FlatIndex {
         file: File,
         filename: DistFilename,
         tags: &Tags,
-        required_hashes: &RequiredHashes,
+        hasher: &HashStrategy,
         no_build: &NoBuild,
         no_binary: &NoBinary,
         index: IndexUrl,
@@ -77,13 +78,8 @@ impl FlatIndex {
             DistFilename::WheelFilename(filename) => {
                 let version = filename.version.clone();
 
-                let compatibility = Self::wheel_compatibility(
-                    &filename,
-                    &file.hashes,
-                    tags,
-                    required_hashes,
-                    no_binary,
-                );
+                let compatibility =
+                    Self::wheel_compatibility(&filename, &file.hashes, tags, hasher, no_binary);
                 let dist = Dist::Built(BuiltDist::Registry(RegistryBuiltDist {
                     filename,
                     file: Box::new(file),
@@ -99,12 +95,8 @@ impl FlatIndex {
                 }
             }
             DistFilename::SourceDistFilename(filename) => {
-                let compatibility = Self::source_dist_compatibility(
-                    &filename,
-                    &file.hashes,
-                    required_hashes,
-                    no_build,
-                );
+                let compatibility =
+                    Self::source_dist_compatibility(&filename, &file.hashes, hasher, no_build);
                 let dist = Dist::Source(SourceDist::Registry(RegistrySourceDist {
                     filename: filename.clone(),
                     file: Box::new(file),
@@ -125,7 +117,7 @@ impl FlatIndex {
     fn source_dist_compatibility(
         filename: &SourceDistFilename,
         hashes: &[HashDigest],
-        required_hashes: &RequiredHashes,
+        hasher: &HashStrategy,
         no_build: &NoBuild,
     ) -> SourceDistCompatibility {
         // Check if source distributions are allowed for this package.
@@ -140,10 +132,10 @@ impl FlatIndex {
         }
 
         // Check if hashes line up
-        let hash = if let Some(required_hashes) = required_hashes.get(&filename.name) {
+        let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
             if hashes.is_empty() {
                 Hash::Missing
-            } else if hashes.iter().any(|hash| required_hashes.contains(hash)) {
+            } else if required.iter().any(|hash| hashes.contains(hash)) {
                 Hash::Matched
             } else {
                 Hash::Mismatched
@@ -159,7 +151,7 @@ impl FlatIndex {
         filename: &WheelFilename,
         hashes: &[HashDigest],
         tags: &Tags,
-        required_hashes: &RequiredHashes,
+        hasher: &HashStrategy,
         no_binary: &NoBinary,
     ) -> WheelCompatibility {
         // Check if binaries are allowed for this package.
@@ -182,10 +174,10 @@ impl FlatIndex {
         };
 
         // Check if hashes line up
-        let hash = if let Some(required_hashes) = required_hashes.get(&filename.name) {
+        let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
             if hashes.is_empty() {
                 Hash::Missing
-            } else if hashes.iter().any(|hash| required_hashes.contains(hash)) {
+            } else if required.iter().any(|hash| hashes.contains(hash)) {
                 Hash::Matched
             } else {
                 Hash::Mismatched
```
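When validating against a `--find-links` index, a file's published digests fall into one of three buckets: missing, matched, or mismatched. A hedged sketch of that classification as a free function (the function itself is hypothetical; the `Hash` variants and comparison mirror the diff):

```rust
use distribution_types::{Hash, HashPolicy};
use pypi_types::HashDigest;

/// Classify a file's published digests against a hash policy (illustrative only).
fn classify(policy: HashPolicy, published: &[HashDigest]) -> Option<Hash> {
    let HashPolicy::Validate(required) = policy else {
        // `None` and `Generate` don't validate, so there is nothing to classify.
        return None;
    };
    Some(if published.is_empty() {
        // The index published no digests at all for this file.
        Hash::Missing
    } else if required.iter().any(|hash| published.contains(hash)) {
        // At least one pinned digest matches a published digest.
        Hash::Matched
    } else {
        Hash::Mismatched
    })
}
```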
(deleted: the resolver's `hash_checking_mode` module)

```diff
@@ -1,15 +0,0 @@
-#[derive(Debug, Default, Clone, Copy)]
-pub enum HashCheckingMode {
-    /// Hash-checking mode is disabled.
-    #[default]
-    Disabled,
-    /// Hash-checking mode is enabled.
-    Enabled,
-}
-
-impl HashCheckingMode {
-    /// Returns `true` if hash-checking is enabled.
-    pub fn is_enabled(self) -> bool {
-        matches!(self, Self::Enabled)
-    }
-}
```
```diff
@@ -2,7 +2,6 @@ pub use dependency_mode::DependencyMode;
 pub use error::ResolveError;
 pub use exclusions::Exclusions;
 pub use flat_index::FlatIndex;
-pub use hash_checking_mode::HashCheckingMode;
 pub use manifest::Manifest;
 pub use options::{Options, OptionsBuilder};
 pub use preferences::{Preference, PreferenceError};
@@ -27,7 +26,6 @@ mod editables;
 mod error;
 mod exclusions;
 mod flat_index;
-mod hash_checking_mode;
 mod manifest;
 mod options;
 mod pins;
```
```diff
@@ -1,6 +1,5 @@
 use chrono::{DateTime, Utc};
 
-use crate::hash_checking_mode::HashCheckingMode;
 use crate::{DependencyMode, PreReleaseMode, ResolutionMode};
 
 /// Options for resolving a manifest.
@@ -9,7 +8,6 @@ pub struct Options {
     pub resolution_mode: ResolutionMode,
     pub prerelease_mode: PreReleaseMode,
     pub dependency_mode: DependencyMode,
-    pub hash_checking_mode: HashCheckingMode,
     pub exclude_newer: Option<DateTime<Utc>>,
 }
 
@@ -19,7 +17,6 @@ pub struct OptionsBuilder {
     resolution_mode: ResolutionMode,
     prerelease_mode: PreReleaseMode,
     dependency_mode: DependencyMode,
-    hash_checking_mode: HashCheckingMode,
     exclude_newer: Option<DateTime<Utc>>,
 }
 
@@ -50,13 +47,6 @@ impl OptionsBuilder {
         self
     }
 
-    /// Sets the hash-checking mode.
-    #[must_use]
-    pub fn hash_checking_mode(mut self, hash_checking_mode: HashCheckingMode) -> Self {
-        self.hash_checking_mode = hash_checking_mode;
-        self
-    }
-
     /// Sets the exclusion date.
     #[must_use]
     pub fn exclude_newer(mut self, exclude_newer: Option<DateTime<Utc>>) -> Self {
@@ -70,7 +60,6 @@ impl OptionsBuilder {
             resolution_mode: self.resolution_mode,
             prerelease_mode: self.prerelease_mode,
             dependency_mode: self.dependency_mode,
-            hash_checking_mode: self.hash_checking_mode,
             exclude_newer: self.exclude_newer,
         }
     }
```
```diff
@@ -134,15 +134,13 @@ impl ResolutionGraph {
                 .filter(|digests| !digests.is_empty())
             {
                 hashes.insert(package_name.clone(), digests.to_vec());
-            } else if let Some(versions_response) = packages.get(package_name) {
-                if let VersionsResponse::Found(ref version_maps) = *versions_response {
-                    for version_map in version_maps {
-                        if let Some(mut digests) = version_map.hashes(version) {
-                            digests.sort_unstable();
-                            hashes.insert(package_name.clone(), digests);
-                            break;
-                        }
-                    }
-                }
+            } else if let Some(metadata_response) =
+                distributions.get(&pinned_package.package_id())
+            {
+                if let MetadataResponse::Found(ref archive) = *metadata_response {
+                    let mut digests = archive.hashes.clone();
+                    digests.sort_unstable();
+                    hashes.insert(package_name.clone(), digests);
+                }
             }
 
```
@ -31,12 +31,11 @@ use uv_configuration::{Constraints, Overrides};
|
|||
use uv_distribution::{ArchiveMetadata, DistributionDatabase};
|
||||
use uv_interpreter::Interpreter;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_types::{BuildContext, InstalledPackagesProvider, RequiredHashes};
|
||||
use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider};
|
||||
|
||||
use crate::candidate_selector::{CandidateDist, CandidateSelector};
|
||||
use crate::editables::Editables;
|
||||
use crate::error::ResolveError;
|
||||
use crate::hash_checking_mode::HashCheckingMode;
|
||||
use crate::manifest::Manifest;
|
||||
use crate::pins::FilePins;
|
||||
use crate::preferences::Preferences;
|
||||
|
@ -122,8 +121,7 @@ pub struct Resolver<
|
|||
urls: Urls,
|
||||
locals: Locals,
|
||||
dependency_mode: DependencyMode,
|
||||
hash_checking_mode: HashCheckingMode,
|
||||
hashes: &'a RequiredHashes,
|
||||
hasher: &'a HashStrategy,
|
||||
markers: &'a MarkerEnvironment,
|
||||
python_requirement: PythonRequirement,
|
||||
selector: CandidateSelector,
|
||||
|
@ -158,7 +156,7 @@ impl<
|
|||
client: &'a RegistryClient,
|
||||
flat_index: &'a FlatIndex,
|
||||
index: &'a InMemoryIndex,
|
||||
hashes: &'a RequiredHashes,
|
||||
hasher: &'a HashStrategy,
|
||||
build_context: &'a Context,
|
||||
installed_packages: &'a InstalledPackages,
|
||||
) -> Result<Self, ResolveError> {
|
||||
|
@ -169,7 +167,7 @@ impl<
|
|||
tags,
|
||||
PythonRequirement::new(interpreter, markers),
|
||||
AllowedYanks::from_manifest(&manifest, markers),
|
||||
hashes,
|
||||
hasher,
|
||||
options.exclude_newer,
|
||||
build_context.no_binary(),
|
||||
build_context.no_build(),
|
||||
|
@ -177,7 +175,7 @@ impl<
|
|||
Self::new_custom_io(
|
||||
manifest,
|
||||
options,
|
||||
hashes,
|
||||
hasher,
|
||||
markers,
|
||||
PythonRequirement::new(interpreter, markers),
|
||||
index,
|
||||
|
@ -198,7 +196,7 @@ impl<
|
|||
pub fn new_custom_io(
|
||||
manifest: Manifest,
|
||||
options: Options,
|
||||
hashes: &'a RequiredHashes,
|
||||
hasher: &'a HashStrategy,
|
||||
markers: &'a MarkerEnvironment,
|
||||
python_requirement: PythonRequirement,
|
||||
index: &'a InMemoryIndex,
|
||||
|
@ -212,7 +210,6 @@ impl<
|
|||
visited: DashSet::default(),
|
||||
selector: CandidateSelector::for_resolution(options, &manifest, markers),
|
||||
dependency_mode: options.dependency_mode,
|
||||
hash_checking_mode: options.hash_checking_mode,
|
||||
urls: Urls::from_manifest(&manifest, markers)?,
|
||||
locals: Locals::from_manifest(&manifest, markers),
|
||||
project: manifest.project,
|
||||
|
@ -222,7 +219,7 @@ impl<
|
|||
preferences: Preferences::from_iter(manifest.preferences, markers),
|
||||
exclusions: manifest.exclusions,
|
||||
editables: Editables::from_requirements(manifest.editables),
|
||||
hashes,
|
||||
hasher,
|
||||
markers,
|
||||
python_requirement,
|
||||
reporter: None,
|
||||
|
@ -528,10 +525,8 @@ impl<
PubGrubPackage::Python(_) => {}
PubGrubPackage::Package(package_name, _extra, None) => {
// Validate that the package is permitted under hash-checking mode.
if self.hash_checking_mode.is_enabled() {
if !self.hashes.contains(package_name) {
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}
if !self.hasher.allows(package_name) {
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}

// Emit a request to fetch the metadata for this package.

@ -544,10 +539,8 @@ impl<
}
PubGrubPackage::Package(package_name, _extra, Some(url)) => {
// Validate that the package is permitted under hash-checking mode.
if self.hash_checking_mode.is_enabled() {
if !self.hashes.contains(package_name) {
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}
if !self.hasher.allows(package_name) {
return Err(ResolveError::UnhashedPackage(package_name.clone()));
}

// Emit a request to fetch the metadata for this distribution.
@ -10,7 +10,7 @@ use uv_client::RegistryClient;
|
|||
use uv_configuration::{NoBinary, NoBuild};
|
||||
use uv_distribution::{ArchiveMetadata, DistributionDatabase};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_types::{BuildContext, RequiredHashes};
|
||||
use uv_types::{BuildContext, HashStrategy};
|
||||
|
||||
use crate::flat_index::FlatIndex;
|
||||
use crate::python_requirement::PythonRequirement;
|
||||
|
@ -83,7 +83,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext + Send + Sync> {
|
|||
tags: Tags,
|
||||
python_requirement: PythonRequirement,
|
||||
allowed_yanks: AllowedYanks,
|
||||
required_hashes: RequiredHashes,
|
||||
hasher: HashStrategy,
|
||||
exclude_newer: Option<DateTime<Utc>>,
|
||||
no_binary: NoBinary,
|
||||
no_build: NoBuild,
|
||||
|
@ -99,7 +99,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
|
|||
tags: &'a Tags,
|
||||
python_requirement: PythonRequirement,
|
||||
allowed_yanks: AllowedYanks,
|
||||
required_hashes: &'a RequiredHashes,
|
||||
hasher: &'a HashStrategy,
|
||||
exclude_newer: Option<DateTime<Utc>>,
|
||||
no_binary: &'a NoBinary,
|
||||
no_build: &'a NoBuild,
|
||||
|
@ -111,7 +111,7 @@ impl<'a, Context: BuildContext + Send + Sync> DefaultResolverProvider<'a, Contex
|
|||
tags: tags.clone(),
|
||||
python_requirement,
|
||||
allowed_yanks,
|
||||
required_hashes: required_hashes.clone(),
|
||||
hasher: hasher.clone(),
|
||||
exclude_newer,
|
||||
no_binary: no_binary.clone(),
|
||||
no_build: no_build.clone(),
|
||||
|
@ -139,7 +139,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
|
|||
&self.tags,
|
||||
&self.python_requirement,
|
||||
&self.allowed_yanks,
|
||||
&self.required_hashes,
|
||||
&self.hasher,
|
||||
self.exclude_newer.as_ref(),
|
||||
self.flat_index.get(package_name).cloned(),
|
||||
&self.no_binary,
|
||||
|
@ -179,8 +179,11 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider

/// Fetch the metadata for a distribution, building it if necessary.
async fn get_or_build_wheel_metadata<'io>(&'io self, dist: &'io Dist) -> WheelMetadataResult {
let hashes = self.required_hashes.get(dist.name()).unwrap_or_default();
match self.fetcher.get_or_build_wheel_metadata(dist, hashes).await {
match self
.fetcher
.get_or_build_wheel_metadata(dist, self.hasher.get(dist.name()))
.await
{
Ok(metadata) => Ok(MetadataResponse::Found(metadata)),
Err(err) => match err {
uv_distribution::Error::Client(client) => match client.into_kind() {
@ -17,7 +17,7 @@ use pypi_types::{HashDigest, Yanked};
use uv_client::{OwnedArchive, SimpleMetadata, VersionFiles};
use uv_configuration::{NoBinary, NoBuild};
use uv_normalize::PackageName;
use uv_types::RequiredHashes;
use uv_types::HashStrategy;
use uv_warnings::warn_user_once;

use crate::flat_index::FlatDistributions;

@ -48,7 +48,7 @@ impl VersionMap {
tags: &Tags,
python_requirement: &PythonRequirement,
allowed_yanks: &AllowedYanks,
required_hashes: &RequiredHashes,
hasher: &HashStrategy,
exclude_newer: Option<&DateTime<Utc>>,
flat_index: Option<FlatDistributions>,
no_binary: &NoBinary,

@ -112,10 +112,7 @@ impl VersionMap {
.allowed_versions(package_name)
.cloned()
.unwrap_or_default();
let required_hashes = required_hashes
.get(package_name)
.unwrap_or_default()
.to_vec();
let required_hashes = hasher.get(package_name).digests().to_vec();
Self {
inner: VersionMapInner::Lazy(VersionMapLazy {
map,
@ -22,7 +22,7 @@ use uv_resolver::{
OptionsBuilder, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
};
use uv_types::{
BuildContext, BuildIsolation, EmptyInstalledPackages, RequiredHashes, SourceBuildTrait,
BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy, SourceBuildTrait,
};

// Exclude any packages uploaded after this date.

@ -125,7 +125,7 @@ async fn resolve(
find_default_python(&Cache::temp().unwrap()).expect("Expected a python to be installed");
let interpreter = Interpreter::artificial(real_interpreter.platform().clone(), markers.clone());
let build_context = DummyContext::new(Cache::temp()?, interpreter.clone());
let hashes = RequiredHashes::default();
let hashes = HashStrategy::None;
let installed_packages = EmptyInstalledPackages;
let resolver = Resolver::new(
manifest,
@ -1,3 +1,4 @@
use distribution_types::HashPolicy;
use rustc_hash::FxHashMap;
use std::str::FromStr;

@ -5,18 +6,45 @@ use pep508_rs::{MarkerEnvironment, Requirement, VersionOrUrl};
use pypi_types::{HashDigest, HashError};
use uv_normalize::PackageName;

/// A set of package versions that are permitted, even if they're marked as yanked by the
/// relevant index.
#[derive(Debug, Default, Clone)]
pub struct RequiredHashes(FxHashMap<PackageName, Vec<HashDigest>>);
#[derive(Debug, Clone)]
pub enum HashStrategy {
/// No hash policy is specified.
None,
/// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
Generate,
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
/// be generated so as to ensure that the archive is valid.
Validate(FxHashMap<PackageName, Vec<HashDigest>>),
}

impl RequiredHashes {
/// Generate the [`RequiredHashes`] from a set of requirement entries.
impl HashStrategy {
/// Return the [`HashPolicy`] for the given package.
pub fn get(&self, package_name: &PackageName) -> HashPolicy {
match self {
Self::None => HashPolicy::None,
Self::Generate => HashPolicy::Generate,
Self::Validate(hashes) => hashes
.get(package_name)
.map(Vec::as_slice)
.map_or(HashPolicy::None, HashPolicy::Validate),
}
}

/// Returns `true` if the given package is allowed.
pub fn allows(&self, package_name: &PackageName) -> bool {
match self {
Self::None => true,
Self::Generate => true,
Self::Validate(hashes) => hashes.contains_key(package_name),
}
}

/// Generate the required hashes from a set of [`Requirement`] entries.
pub fn from_requirements(
requirements: impl Iterator<Item = (Requirement, Vec<String>)>,
markers: &MarkerEnvironment,
) -> Result<Self, RequiredHashesError> {
let mut allowed_hashes = FxHashMap::<PackageName, Vec<HashDigest>>::default();
) -> Result<Self, HashStrategyError> {
let mut hashes = FxHashMap::<PackageName, Vec<HashDigest>>::default();

// For each requirement, map from name to allowed hashes. We use the last entry for each
// package.

@ -26,7 +54,7 @@ impl RequiredHashes {
//
// TODO(charlie): Preserve hashes from `requirements.txt` through to this pass, so that we
// can iterate over requirements directly, rather than iterating over the entries.
for (requirement, hashes) in requirements {
for (requirement, digests) in requirements {
if !requirement.evaluate_markers(markers, &[]) {
continue;
}

@ -43,51 +71,40 @@ impl RequiredHashes {
{
// Pinned versions are allowed.
} else {
return Err(RequiredHashesError::UnpinnedRequirement(
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
));
}
}
None => {
return Err(RequiredHashesError::UnpinnedRequirement(
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
))
}
}

// Every requirement must include a hash.
if hashes.is_empty() {
return Err(RequiredHashesError::MissingHashes(requirement.to_string()));
if digests.is_empty() {
return Err(HashStrategyError::MissingHashes(requirement.to_string()));
}

// Parse the hashes.
let hashes = hashes
let digests = digests
.iter()
.map(|hash| HashDigest::from_str(hash))
.map(|digest| HashDigest::from_str(digest))
.collect::<Result<Vec<_>, _>>()
.unwrap();

// TODO(charlie): Extract hashes from URL fragments.
allowed_hashes.insert(requirement.name, hashes);
hashes.insert(requirement.name, digests);
}

Ok(Self(allowed_hashes))
}

/// Returns versions for the given package which are allowed even if marked as yanked by the
/// relevant index.
pub fn get(&self, package_name: &PackageName) -> Option<&[HashDigest]> {
self.0.get(package_name).map(Vec::as_slice)
}

/// Returns whether the given package is allowed even if marked as yanked by the relevant index.
pub fn contains(&self, package_name: &PackageName) -> bool {
self.0.contains_key(package_name)
Ok(Self::Validate(hashes))
}
}

#[derive(thiserror::Error, Debug)]
pub enum RequiredHashesError {
pub enum HashStrategyError {
#[error(transparent)]
Hash(#[from] HashError),
#[error("Unnamed requirements are not supported in `--require-hashes`")]
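The enum above replaces `RequiredHashes` and is what `pip compile`, `pip install`, and `pip sync` now thread through the resolver, the version map, and the downloader (see the call sites later in this diff). A minimal usage sketch, assuming only the `HashStrategy`/`HashPolicy` API shown here and `PackageName`'s `FromStr` impl; the flag wiring mirrors the `pip compile` hunk below, and the package name is a placeholder:

```rust
// Illustrative sketch only; not part of the commit.
use std::str::FromStr;

use distribution_types::HashPolicy;
use uv_normalize::PackageName;
use uv_types::HashStrategy;

fn example(generate_hashes: bool) {
    // `--generate-hashes` maps to `Generate`; with no hash flags, `None`.
    // `--require-hashes` would instead build `Validate` via `from_requirements`.
    let hasher = if generate_hashes {
        HashStrategy::Generate
    } else {
        HashStrategy::None
    };

    let name = PackageName::from_str("anyio").unwrap();
    assert!(hasher.allows(&name)); // `Generate` and `None` allow every package.

    // The per-package policy tells the distribution database what to do.
    match hasher.get(&name) {
        HashPolicy::Generate => { /* compute a SHA-256 digest while fetching */ }
        HashPolicy::Validate(_digests) => { /* verify the archive against the expected digests */ }
        HashPolicy::None => { /* no hashing required */ }
    }
}
```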
@ -1,12 +1,12 @@
//! Fundamental types shared across `uv` crates.
pub use builds::*;
pub use downloads::*;
pub use hashes::*;
pub use hash::*;
pub use requirements::*;
pub use traits::*;

mod builds;
mod downloads;
mod hashes;
mod hash;
mod requirements;
mod traits;
@ -38,7 +38,7 @@ use uv_resolver::{
|
|||
Manifest, OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver,
|
||||
};
|
||||
use uv_toolchain::PythonVersion;
|
||||
use uv_types::{BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes};
|
||||
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, ResolverReporter};
|
||||
|
@ -199,8 +199,12 @@ pub(crate) async fn pip_compile(
|python_version| Cow::Owned(python_version.markers(interpreter.markers())),
);

// Don't enforce hashes during resolution.
let hashes = RequiredHashes::default();
// Generate, but don't enforce hashes for the requirements.
let hasher = if generate_hashes {
HashStrategy::Generate
} else {
HashStrategy::None
};

// Incorporate any index locations from the provided sources.
let index_locations =
@ -233,7 +237,7 @@ pub(crate) async fn pip_compile(
|
|||
let flat_index = {
|
||||
let client = FlatIndexClient::new(&client, &cache);
|
||||
let entries = client.fetch(index_locations.flat_index()).await?;
|
||||
FlatIndex::from_entries(entries, &tags, &hashes, &no_build, &NoBinary::None)
|
||||
FlatIndex::from_entries(entries, &tags, &hasher, &no_build, &NoBinary::None)
|
||||
};
|
||||
|
||||
// Track in-flight downloads, builds, etc., across resolutions.
|
||||
|
@ -272,7 +276,7 @@ pub(crate) async fn pip_compile(
|
|||
// Convert from unnamed to named requirements.
|
||||
let mut requirements = NamedRequirementsResolver::new(
|
||||
requirements,
|
||||
false,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
&client,
|
||||
&top_level_index,
|
||||
|
@ -287,7 +291,7 @@ pub(crate) async fn pip_compile(
|
|||
SourceTreeResolver::new(
|
||||
source_trees,
|
||||
&extras,
|
||||
false,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
&client,
|
||||
&top_level_index,
|
||||
|
@ -312,7 +316,7 @@ pub(crate) async fn pip_compile(
|
|||
LocalEditable { url, path, extras }
|
||||
}));
|
||||
|
||||
let downloader = Downloader::new(&cache, &tags, &hashes, &client, &build_dispatch)
|
||||
let downloader = Downloader::new(&cache, &tags, &hasher, &client, &build_dispatch)
|
||||
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
|
||||
|
||||
// Build all editables.
|
||||
|
@ -360,7 +364,7 @@ pub(crate) async fn pip_compile(
|
|||
&constraints,
|
||||
&overrides,
|
||||
&editables,
|
||||
&hashes,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
&client,
|
||||
&top_level_index,
|
||||
|
@ -399,7 +403,7 @@ pub(crate) async fn pip_compile(
|
|||
&client,
|
||||
&flat_index,
|
||||
&top_level_index,
|
||||
&hashes,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
&EmptyInstalledPackages,
|
||||
)?
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use std::fmt::Write;
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use anstream::eprint;
|
||||
|
@ -37,10 +38,10 @@ use uv_requirements::{
|
|||
RequirementsSpecification, SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{
|
||||
DependencyMode, Exclusions, FlatIndex, HashCheckingMode, InMemoryIndex, Manifest, Options,
|
||||
OptionsBuilder, PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
|
||||
DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, OptionsBuilder,
|
||||
PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
|
||||
};
|
||||
use uv_types::{BuildIsolation, InFlight, RequiredHashes};
|
||||
use uv_types::{BuildIsolation, HashStrategy, InFlight};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
|
||||
|
@ -186,8 +187,8 @@ pub(crate) async fn pip_install(
|
|||
let markers = venv.interpreter().markers();
|
||||
|
||||
// Collect the set of required hashes.
|
||||
let hashes = if require_hashes {
|
||||
RequiredHashes::from_requirements(
|
||||
let hasher = if require_hashes {
|
||||
HashStrategy::from_requirements(
|
||||
entries
|
||||
.into_iter()
|
||||
.filter_map(|requirement| match requirement.requirement {
|
||||
|
@ -197,7 +198,7 @@ pub(crate) async fn pip_install(
|
|||
markers,
|
||||
)?
|
||||
} else {
|
||||
RequiredHashes::default()
|
||||
HashStrategy::None
|
||||
};
|
||||
|
||||
// Incorporate any index locations from the provided sources.
|
||||
|
@ -224,7 +225,7 @@ pub(crate) async fn pip_install(
|
|||
let flat_index = {
|
||||
let client = FlatIndexClient::new(&client, &cache);
|
||||
let entries = client.fetch(index_locations.flat_index()).await?;
|
||||
FlatIndex::from_entries(entries, tags, &hashes, &no_build, &no_binary)
|
||||
FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
|
||||
};
|
||||
|
||||
// Determine whether to enable build isolation.
|
||||
|
@ -266,7 +267,7 @@ pub(crate) async fn pip_install(
|
|||
// Convert from unnamed to named requirements.
|
||||
let mut requirements = NamedRequirementsResolver::new(
|
||||
requirements,
|
||||
require_hashes,
|
||||
&hasher,
|
||||
&resolve_dispatch,
|
||||
&client,
|
||||
&index,
|
||||
|
@ -281,7 +282,7 @@ pub(crate) async fn pip_install(
|
|||
SourceTreeResolver::new(
|
||||
source_trees,
|
||||
extras,
|
||||
require_hashes,
|
||||
&hasher,
|
||||
&resolve_dispatch,
|
||||
&client,
|
||||
&index,
|
||||
|
@ -306,7 +307,7 @@ pub(crate) async fn pip_install(
|
|||
build_editables(
|
||||
&editables,
|
||||
editable_wheel_dir.path(),
|
||||
&hashes,
|
||||
&hasher,
|
||||
&cache,
|
||||
&interpreter,
|
||||
tags,
|
||||
|
@ -321,11 +322,6 @@ pub(crate) async fn pip_install(
|
|||
.resolution_mode(resolution_mode)
|
||||
.prerelease_mode(prerelease_mode)
|
||||
.dependency_mode(dependency_mode)
|
||||
.hash_checking_mode(if require_hashes {
|
||||
HashCheckingMode::Enabled
|
||||
} else {
|
||||
HashCheckingMode::Disabled
|
||||
})
|
||||
.exclude_newer(exclude_newer)
|
||||
.build();
|
||||
|
||||
|
@ -336,7 +332,7 @@ pub(crate) async fn pip_install(
|
|||
overrides,
|
||||
project,
|
||||
&editables,
|
||||
&hashes,
|
||||
&hasher,
|
||||
&site_packages,
|
||||
&reinstall,
|
||||
&upgrade,
|
||||
|
@ -397,7 +393,7 @@ pub(crate) async fn pip_install(
|
|||
link_mode,
|
||||
compile,
|
||||
&index_locations,
|
||||
&hashes,
|
||||
&hasher,
|
||||
tags,
|
||||
&client,
|
||||
&in_flight,
|
||||
|
@ -473,7 +469,7 @@ async fn read_requirements(
|
|||
async fn build_editables(
|
||||
editables: &[EditableRequirement],
|
||||
editable_wheel_dir: &Path,
|
||||
hashes: &RequiredHashes,
|
||||
hasher: &HashStrategy,
|
||||
cache: &Cache,
|
||||
interpreter: &Interpreter,
|
||||
tags: &Tags,
|
||||
|
@ -483,7 +479,7 @@ async fn build_editables(
|
|||
) -> Result<Vec<BuiltEditable>, Error> {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch)
|
||||
let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
|
||||
.with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64));
|
||||
|
||||
let editables = LocalEditables::from_editables(editables.iter().map(|editable| {
|
||||
|
@ -540,7 +536,7 @@ async fn resolve(
|
|||
overrides: Vec<Requirement>,
|
||||
project: Option<PackageName>,
|
||||
editables: &[BuiltEditable],
|
||||
hashes: &RequiredHashes,
|
||||
hasher: &HashStrategy,
|
||||
site_packages: &SitePackages<'_>,
|
||||
reinstall: &Reinstall,
|
||||
upgrade: &Upgrade,
|
||||
|
@ -587,7 +583,7 @@ async fn resolve(
|
|||
&constraints,
|
||||
&overrides,
|
||||
&editables,
|
||||
hashes,
|
||||
hasher,
|
||||
build_dispatch,
|
||||
client,
|
||||
index,
|
||||
|
@ -618,7 +614,7 @@ async fn resolve(
|
|||
client,
|
||||
flat_index,
|
||||
index,
|
||||
hashes,
|
||||
hasher,
|
||||
build_dispatch,
|
||||
site_packages,
|
||||
)?
|
||||
|
@ -662,7 +658,7 @@ async fn install(
|
|||
link_mode: LinkMode,
|
||||
compile: bool,
|
||||
index_urls: &IndexLocations,
|
||||
hashes: &RequiredHashes,
|
||||
hasher: &HashStrategy,
|
||||
tags: &Tags,
|
||||
client: &RegistryClient,
|
||||
in_flight: &InFlight,
|
||||
|
@ -690,7 +686,7 @@ async fn install(
|
|||
site_packages,
|
||||
reinstall,
|
||||
no_binary,
|
||||
hashes,
|
||||
hasher,
|
||||
index_urls,
|
||||
cache,
|
||||
venv,
|
||||
|
@ -743,7 +739,7 @@ async fn install(
|
|||
} else {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch)
|
||||
let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
|
||||
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
|
||||
|
||||
let wheels = downloader
|
||||
|
@ -1060,7 +1056,7 @@ enum Error {
|
|||
Platform(#[from] platform_tags::PlatformError),
|
||||
|
||||
#[error(transparent)]
|
||||
RequiredHashes(#[from] uv_types::RequiredHashesError),
|
||||
Hash(#[from] uv_types::HashStrategyError),
|
||||
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
|
|
|
@ -30,10 +30,8 @@ use uv_requirements::{
|
|||
ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
|
||||
SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{
|
||||
DependencyMode, FlatIndex, HashCheckingMode, InMemoryIndex, Manifest, OptionsBuilder, Resolver,
|
||||
};
|
||||
use uv_types::{BuildIsolation, EmptyInstalledPackages, InFlight, RequiredHashes};
|
||||
use uv_resolver::{DependencyMode, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, Resolver};
|
||||
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
|
||||
|
@ -138,8 +136,8 @@ pub(crate) async fn pip_sync(
|
|||
let markers = venv.interpreter().markers();
|
||||
|
||||
// Collect the set of required hashes.
|
||||
let hashes = if require_hashes {
|
||||
RequiredHashes::from_requirements(
|
||||
let hasher = if require_hashes {
|
||||
HashStrategy::from_requirements(
|
||||
entries
|
||||
.into_iter()
|
||||
.filter_map(|requirement| match requirement.requirement {
|
||||
|
@ -149,7 +147,7 @@ pub(crate) async fn pip_sync(
|
|||
markers,
|
||||
)?
|
||||
} else {
|
||||
RequiredHashes::default()
|
||||
HashStrategy::None
|
||||
};
|
||||
|
||||
// Incorporate any index locations from the provided sources.
|
||||
|
@ -176,7 +174,7 @@ pub(crate) async fn pip_sync(
|
|||
let flat_index = {
|
||||
let client = FlatIndexClient::new(&client, &cache);
|
||||
let entries = client.fetch(index_locations.flat_index()).await?;
|
||||
FlatIndex::from_entries(entries, tags, &hashes, &no_build, &no_binary)
|
||||
FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
|
||||
};
|
||||
|
||||
// Create a shared in-memory index.
|
||||
|
@ -218,16 +216,11 @@ pub(crate) async fn pip_sync(
|
|||
// Convert from unnamed to named requirements.
|
||||
let requirements = {
|
||||
// Convert from unnamed to named requirements.
|
||||
let mut requirements = NamedRequirementsResolver::new(
|
||||
requirements,
|
||||
require_hashes,
|
||||
&build_dispatch,
|
||||
&client,
|
||||
&index,
|
||||
)
|
||||
.with_reporter(ResolverReporter::from(printer))
|
||||
.resolve()
|
||||
.await?;
|
||||
let mut requirements =
|
||||
NamedRequirementsResolver::new(requirements, &hasher, &build_dispatch, &client, &index)
|
||||
.with_reporter(ResolverReporter::from(printer))
|
||||
.resolve()
|
||||
.await?;
|
||||
|
||||
// Resolve any source trees into requirements.
|
||||
if !source_trees.is_empty() {
|
||||
|
@ -235,7 +228,7 @@ pub(crate) async fn pip_sync(
|
|||
SourceTreeResolver::new(
|
||||
source_trees,
|
||||
&ExtrasSpecification::None,
|
||||
require_hashes,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
&client,
|
||||
&index,
|
||||
|
@ -254,7 +247,7 @@ pub(crate) async fn pip_sync(
|
|||
editables,
|
||||
&site_packages,
|
||||
reinstall,
|
||||
&hashes,
|
||||
&hasher,
|
||||
venv.interpreter(),
|
||||
tags,
|
||||
&cache,
|
||||
|
@ -278,7 +271,7 @@ pub(crate) async fn pip_sync(
|
|||
site_packages,
|
||||
reinstall,
|
||||
&no_binary,
|
||||
&hashes,
|
||||
&hasher,
|
||||
&index_locations,
|
||||
&cache,
|
||||
&venv,
|
||||
|
@ -317,11 +310,6 @@ pub(crate) async fn pip_sync(
|
|||
// Resolve with `--no-deps`.
|
||||
let options = OptionsBuilder::new()
|
||||
.dependency_mode(DependencyMode::Direct)
|
||||
.hash_checking_mode(if require_hashes {
|
||||
HashCheckingMode::Enabled
|
||||
} else {
|
||||
HashCheckingMode::Disabled
|
||||
})
|
||||
.build();
|
||||
|
||||
// Create a bound on the progress bar, since we know the number of packages upfront.
|
||||
|
@ -337,7 +325,7 @@ pub(crate) async fn pip_sync(
|
|||
&client,
|
||||
&flat_index,
|
||||
&index,
|
||||
&hashes,
|
||||
&hasher,
|
||||
&build_dispatch,
|
||||
// TODO(zanieb): We should consider support for installed packages in pip sync
|
||||
&EmptyInstalledPackages,
|
||||
|
@ -381,7 +369,7 @@ pub(crate) async fn pip_sync(
|
|||
} else {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let downloader = Downloader::new(&cache, tags, &hashes, &client, &build_dispatch)
|
||||
let downloader = Downloader::new(&cache, tags, &hasher, &client, &build_dispatch)
|
||||
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
|
||||
|
||||
let wheels = downloader
|
||||
|
@ -577,7 +565,7 @@ async fn resolve_editables(
|
|||
editables: Vec<EditableRequirement>,
|
||||
site_packages: &SitePackages<'_>,
|
||||
reinstall: &Reinstall,
|
||||
hashes: &RequiredHashes,
|
||||
hasher: &HashStrategy,
|
||||
interpreter: &Interpreter,
|
||||
tags: &Tags,
|
||||
cache: &Cache,
|
||||
|
@ -644,7 +632,7 @@ async fn resolve_editables(
|
|||
} else {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let downloader = Downloader::new(cache, tags, hashes, client, build_dispatch)
|
||||
let downloader = Downloader::new(cache, tags, hasher, client, build_dispatch)
|
||||
.with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64));
|
||||
|
||||
let editables = LocalEditables::from_editables(uninstalled.iter().map(|editable| {
|
||||
|
|
|
@ -21,7 +21,7 @@ use uv_dispatch::BuildDispatch;
use uv_fs::Simplified;
use uv_interpreter::{find_default_python, find_requested_python, Error};
use uv_resolver::{FlatIndex, InMemoryIndex, OptionsBuilder};
use uv_types::{BuildContext, BuildIsolation, InFlight, RequiredHashes};
use uv_types::{BuildContext, BuildIsolation, HashStrategy, InFlight};

use crate::commands::ExitStatus;
use crate::printer::Printer;

@ -170,7 +170,7 @@ async fn venv_impl(
FlatIndex::from_entries(
entries,
tags,
&RequiredHashes::default(),
&HashStrategy::None,
&NoBuild::All,
&NoBinary::None,
)
@ -3447,33 +3447,14 @@ fn compile_legacy_sdist_setuptools() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Include hashes in the generated output.
|
||||
/// Include hashes from the registry in the generated output.
|
||||
#[test]
|
||||
fn generate_hashes() -> Result<()> {
|
||||
fn generate_hashes_registry() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("flask==3.0.0")?;
|
||||
requirements_in.write_str("anyio==4.0.0")?;
|
||||
|
||||
let colorama_locked = regex::escape(indoc! {r"
|
||||
colorama==0.4.6 \
|
||||
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
|
||||
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
|
||||
# via click
|
||||
"});
|
||||
let filters: Vec<_> = if cfg!(windows) {
|
||||
// Remove colorama
|
||||
vec![
|
||||
(colorama_locked.as_str(), ""),
|
||||
("Resolved 8 packages", "Resolved 7 packages"),
|
||||
]
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
.into_iter()
|
||||
.chain(context.filters())
|
||||
.collect();
|
||||
|
||||
uv_snapshot!(filters, context.compile()
|
||||
uv_snapshot!(context.compile()
|
||||
.arg("requirements.in")
|
||||
.arg("--generate-hashes"), @r###"
|
||||
success: true
|
||||
|
@ -3481,102 +3462,170 @@ fn generate_hashes() -> Result<()> {
|
|||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
|
||||
blinker==1.7.0 \
|
||||
--hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \
|
||||
--hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182
|
||||
# via flask
|
||||
click==8.1.7 \
|
||||
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
|
||||
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
|
||||
# via flask
|
||||
flask==3.0.0 \
|
||||
--hash=sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638 \
|
||||
--hash=sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58
|
||||
itsdangerous==2.1.2 \
|
||||
--hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
|
||||
--hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a
|
||||
# via flask
|
||||
jinja2==3.1.3 \
|
||||
--hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \
|
||||
--hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90
|
||||
# via flask
|
||||
markupsafe==2.1.5 \
|
||||
--hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
|
||||
--hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
|
||||
--hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
|
||||
--hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
|
||||
--hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
|
||||
--hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
|
||||
--hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
|
||||
--hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
|
||||
--hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
|
||||
--hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
|
||||
--hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
|
||||
--hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
|
||||
--hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
|
||||
--hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
|
||||
--hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
|
||||
--hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
|
||||
--hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
|
||||
--hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
|
||||
--hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
|
||||
--hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
|
||||
--hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
|
||||
--hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
|
||||
--hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
|
||||
--hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
|
||||
--hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
|
||||
--hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
|
||||
--hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
|
||||
--hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
|
||||
--hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
|
||||
--hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
|
||||
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
|
||||
--hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
|
||||
--hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
|
||||
--hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
|
||||
--hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
|
||||
--hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
|
||||
--hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
|
||||
--hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
|
||||
--hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
|
||||
--hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
|
||||
--hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
|
||||
--hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
|
||||
--hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
|
||||
--hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
|
||||
--hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
|
||||
--hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
|
||||
--hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
|
||||
--hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
|
||||
--hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
|
||||
--hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
|
||||
--hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
|
||||
--hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
|
||||
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
|
||||
--hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
|
||||
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
|
||||
--hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
|
||||
--hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
|
||||
--hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
|
||||
--hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
|
||||
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
|
||||
# via
|
||||
# jinja2
|
||||
# werkzeug
|
||||
werkzeug==3.0.1 \
|
||||
--hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
|
||||
--hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
|
||||
# via flask
|
||||
anyio==4.0.0 \
|
||||
--hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
|
||||
--hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
|
||||
idna==3.6 \
|
||||
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
|
||||
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
|
||||
# via anyio
|
||||
sniffio==1.3.1 \
|
||||
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
|
||||
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 7 packages in [TIME]
|
||||
Resolved 3 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Include hashes from the URL in the generated output.
|
||||
#[test]
|
||||
fn generate_hashes_source_distribution_url() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
|
||||
|
||||
uv_snapshot!(context.compile()
|
||||
.arg("requirements.in")
|
||||
.arg("--generate-hashes"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
|
||||
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz \
|
||||
--hash=sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f
|
||||
idna==3.6 \
|
||||
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
|
||||
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
|
||||
# via anyio
|
||||
sniffio==1.3.1 \
|
||||
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
|
||||
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Include hashes from the URL in the generated output.
|
||||
#[test]
|
||||
fn generate_hashes_built_distribution_url() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
|
||||
|
||||
uv_snapshot!(context.compile()
|
||||
.arg("requirements.in")
|
||||
.arg("--generate-hashes"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
|
||||
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
|
||||
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
|
||||
idna==3.6 \
|
||||
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
|
||||
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
|
||||
# via anyio
|
||||
sniffio==1.3.1 \
|
||||
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
|
||||
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Given an unnamed URL, include hashes for the URL and its dependencies.
|
||||
#[test]
|
||||
fn generate_hashes_unnamed_url() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
|
||||
|
||||
uv_snapshot!(context.compile()
|
||||
.arg("requirements.in")
|
||||
.arg("--generate-hashes"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
|
||||
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
|
||||
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
|
||||
idna==3.6 \
|
||||
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
|
||||
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
|
||||
# via anyio
|
||||
sniffio==1.3.1 \
|
||||
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
|
||||
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Given a local directory, include hashes for its dependencies, but not the directory itself.
|
||||
#[test]
|
||||
fn generate_hashes_local_directory() -> Result<()> {
|
||||
let _context = TestContext::new("3.12");
|
||||
|
||||
let context = TestContext::new("3.12");
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str(indoc! {r"
|
||||
../../scripts/packages/poetry_editable
|
||||
"
|
||||
})?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.compile()
|
||||
.arg(requirements_in.path())
|
||||
.arg("--generate-hashes")
|
||||
.current_dir(current_dir()?), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --generate-hashes
|
||||
anyio==4.3.0 \
|
||||
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
|
||||
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
|
||||
# via poetry-editable
|
||||
idna==3.6 \
|
||||
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
|
||||
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
|
||||
# via anyio
|
||||
poetry-editable @ ../../scripts/packages/poetry_editable
|
||||
sniffio==1.3.1 \
|
||||
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
|
||||
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 4 packages in [TIME]
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Compile using `--find-links` with a local directory.
|
||||
#[test]
|
||||
fn find_links_directory() -> Result<()> {