Implement a --verify-hashes hash-checking mode (#4007)

## Summary

This adds `--verify-hashes`, an alternative to `--require-hashes` that validates a hash
whenever one is present, but ignores requirements that omit hashes or are absent from the
lockfile entirely.

So, e.g., transitive dependencies that are missing from the lockfile will _not_ error; nor
will dependencies that are included but lack a hash.

Closes https://github.com/astral-sh/uv/issues/3305.
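
For intuition, here's a minimal sketch of the per-requirement decision that separates the two modes. It uses illustrative stand-in types, not uv's internal API:

```rust
/// Illustrative stand-in for uv's internal hash policy (hypothetical names).
#[derive(Debug, PartialEq)]
enum Policy {
    /// Validate the downloaded artifact against these expected digests.
    Validate(Vec<String>),
    /// Skip validation for this requirement.
    None,
}

/// Decide the policy for one requirement, given any hashes on file.
fn policy_for(hashes: Option<&[String]>, require: bool) -> Result<Policy, String> {
    match (hashes, require) {
        // A hash is on file: both modes validate it.
        (Some(digests), _) => Ok(Policy::Validate(digests.to_vec())),
        // No hash on file: `--require-hashes` errors out...
        (None, true) => Err("requirement is missing a hash".to_string()),
        // ...while `--verify-hashes` simply skips validation.
        (None, false) => Ok(Policy::None),
    }
}

fn main() {
    let digests = vec!["sha256:deadbeef".to_string()];
    assert_eq!(
        policy_for(Some(&digests), false),
        Ok(Policy::Validate(digests.clone()))
    );
    assert!(policy_for(None, true).is_err()); // --require-hashes: error
    assert_eq!(policy_for(None, false), Ok(Policy::None)); // --verify-hashes: skip
}
```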
Charlie Marsh committed on 2024-07-17 17:25:31 -04:00
commit 82d94838cb (parent ba4e2e3d2a)
14 changed files with 482 additions and 85 deletions

@@ -803,13 +803,33 @@ pub struct PipSyncArgs {
/// - Editable installs are not supported.
/// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or
/// source archive (`.zip`, `.tar.gz`), as opposed to a directory.
#[arg(long, env = "UV_REQUIRE_HASHES",
value_parser = clap::builder::BoolishValueParser::new(), overrides_with("no_require_hashes"))]
#[arg(
long,
env = "UV_REQUIRE_HASHES",
value_parser = clap::builder::BoolishValueParser::new(),
overrides_with("no_require_hashes"),
)]
pub require_hashes: bool,
#[arg(long, overrides_with("require_hashes"), hide = true)]
pub no_require_hashes: bool,
/// Validate any hashes provided in the requirements file.
///
/// Unlike `--require-hashes`, `--verify-hashes` does not require that all requirements have
/// hashes; instead, it will limit itself to verifying the hashes of those requirements that do
/// include them.
#[arg(
long,
env = "UV_VERIFY_HASHES",
value_parser = clap::builder::BoolishValueParser::new(),
overrides_with("no_verify_hashes"),
)]
pub verify_hashes: bool,
#[arg(long, overrides_with("verify_hashes"), hide = true)]
pub no_verify_hashes: bool,
/// The Python interpreter into which packages should be installed.
///
/// By default, uv installs into the virtual environment in the current working directory or
@@ -1084,6 +1104,22 @@ pub struct PipInstallArgs {
#[arg(long, overrides_with("require_hashes"), hide = true)]
pub no_require_hashes: bool,
/// Validate any hashes provided in the requirements file.
///
/// Unlike `--require-hashes`, `--verify-hashes` does not require that all requirements have
/// hashes; instead, it will limit itself to verifying the hashes of those requirements that do
/// include them.
#[arg(
long,
env = "UV_VERIFY_HASHES",
value_parser = clap::builder::BoolishValueParser::new(),
overrides_with("no_verify_hashes"),
)]
pub verify_hashes: bool,
#[arg(long, overrides_with("verify_hashes"), hide = true)]
pub no_verify_hashes: bool,
/// The Python interpreter into which packages should be installed.
///
/// By default, uv installs into the virtual environment in the current working directory or

@@ -0,0 +1,35 @@
#[derive(Debug, Copy, Clone)]
pub enum HashCheckingMode {
/// Hashes should be validated against a pre-defined list of hashes. Every requirement must
/// itself be hashable (e.g., Git dependencies are forbidden) _and_ have a hash in the lockfile.
Require,
/// Hashes should be validated, if present, but ignored if absent.
Verify,
}
impl HashCheckingMode {
/// Return the [`HashCheckingMode`] from the command-line arguments, if any.
pub fn from_args(require_hashes: bool, verify_hashes: bool) -> Option<Self> {
if require_hashes {
Some(Self::Require)
} else if verify_hashes {
Some(Self::Verify)
} else {
None
}
}
/// Returns `true` if the hash checking mode is `Require`.
pub fn is_require(&self) -> bool {
matches!(self, Self::Require)
}
}
impl std::fmt::Display for HashCheckingMode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Require => write!(f, "--require-hashes"),
Self::Verify => write!(f, "--verify-hashes"),
}
}
}
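
Note that `from_args` checks `require_hashes` first, so if both flags are somehow set, the stricter mode wins. A quick usage sketch (assuming the `HashCheckingMode` above is in scope):

```rust
fn main() {
    use HashCheckingMode::*;
    // `--require-hashes` wins if both flags are set.
    assert!(matches!(HashCheckingMode::from_args(true, true), Some(Require)));
    assert!(matches!(HashCheckingMode::from_args(false, true), Some(Verify)));
    // Neither flag: no hash checking at all.
    assert!(HashCheckingMode::from_args(false, false).is_none());
}
```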

@@ -4,6 +4,7 @@ pub use concurrency::*;
pub use config_settings::*;
pub use constraints::*;
pub use extras::*;
pub use hash::*;
pub use name_specifiers::*;
pub use overrides::*;
pub use package_options::*;
@@ -16,6 +17,7 @@ mod concurrency;
mod config_settings;
mod constraints;
mod extras;
mod hash;
mod name_specifiers;
mod overrides;
mod package_options;

@@ -182,7 +182,8 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
let hashes = match self.hasher {
HashStrategy::None => HashPolicy::None,
HashStrategy::Generate => HashPolicy::Generate,
HashStrategy::Validate { .. } => {
HashStrategy::Verify(_) => HashPolicy::Generate,
HashStrategy::Require(_) => {
return Err(anyhow::anyhow!(
"Hash-checking is not supported for local directories: {}",
path.user_display()
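
The effect of this hunk: under `--verify-hashes`, a local source tree falls back to hash *generation* rather than failing, since there is nothing recorded to validate against; only `--require-hashes` treats an unhashable directory as fatal. A condensed sketch of that branch, with hypothetical stand-in types:

```rust
/// Stand-ins for uv's `HashStrategy` and `HashPolicy` (hypothetical names).
enum Strategy { None, Generate, Verify, Require }

#[derive(Debug, PartialEq)]
enum Policy { None, Generate }

fn source_tree_policy(strategy: &Strategy) -> Result<Policy, String> {
    match strategy {
        Strategy::None => Ok(Policy::None),
        // `Generate` and `Verify` both fall back to generating a hash.
        Strategy::Generate | Strategy::Verify => Ok(Policy::Generate),
        // Only `Require` rejects local directories outright.
        Strategy::Require => Err("Hash-checking is not supported for local directories".into()),
    }
}

fn main() {
    assert_eq!(source_tree_policy(&Strategy::Verify), Ok(Policy::Generate));
    assert!(source_tree_policy(&Strategy::Require).is_err());
}
```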

@@ -1018,6 +1018,19 @@ pub struct PipOptions {
"#
)]
pub require_hashes: Option<bool>,
/// Validate any hashes provided in the requirements file.
///
/// Unlike `--require-hashes`, `--verify-hashes` does not require that all requirements have
/// hashes; instead, it will limit itself to verifying the hashes of those requirements that do
/// include them.
#[option(
default = "false",
value_type = "bool",
example = r#"
verify-hashes = true
"#
)]
pub verify_hashes: Option<bool>,
/// Allow package upgrades, ignoring pinned versions in any existing output file.
#[option(
default = "false",

@@ -6,6 +6,7 @@ use url::Url;
use distribution_types::{DistributionMetadata, HashPolicy, PackageId, UnresolvedRequirement};
use pep508_rs::MarkerEnvironment;
use pypi_types::{HashDigest, HashError, Requirement, RequirementSource};
use uv_configuration::HashCheckingMode;
use uv_normalize::PackageName;
#[derive(Debug, Default, Clone)]
@@ -15,9 +16,14 @@ pub enum HashStrategy {
None,
/// Hashes should be generated (specifically, a SHA-256 hash), but not validated.
Generate,
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
/// be generated so as to ensure that the archive is valid.
Validate(FxHashMap<PackageId, Vec<HashDigest>>),
/// Hashes should be validated, if present, but ignored if absent.
///
/// If necessary, hashes should be generated to ensure that the archive is valid.
Verify(FxHashMap<PackageId, Vec<HashDigest>>),
/// Hashes should be validated against a pre-defined list of hashes.
///
/// If necessary, hashes should be generated to ensure that the archive is valid.
Require(FxHashMap<PackageId, Vec<HashDigest>>),
}
impl HashStrategy {
@@ -26,7 +32,14 @@ impl HashStrategy {
match self {
Self::None => HashPolicy::None,
Self::Generate => HashPolicy::Generate,
Self::Validate(hashes) => HashPolicy::Validate(
Self::Verify(hashes) => {
if let Some(hashes) = hashes.get(&distribution.package_id()) {
HashPolicy::Validate(hashes.as_slice())
} else {
HashPolicy::None
}
}
Self::Require(hashes) => HashPolicy::Validate(
hashes
.get(&distribution.package_id())
.map(Vec::as_slice)
@@ -40,7 +53,14 @@ impl HashStrategy {
match self {
Self::None => HashPolicy::None,
Self::Generate => HashPolicy::Generate,
Self::Validate(hashes) => HashPolicy::Validate(
Self::Verify(hashes) => {
if let Some(hashes) = hashes.get(&PackageId::from_registry(name.clone())) {
HashPolicy::Validate(hashes.as_slice())
} else {
HashPolicy::None
}
}
Self::Require(hashes) => HashPolicy::Validate(
hashes
.get(&PackageId::from_registry(name.clone()))
.map(Vec::as_slice)
@@ -54,7 +74,14 @@ impl HashStrategy {
match self {
Self::None => HashPolicy::None,
Self::Generate => HashPolicy::Generate,
Self::Validate(hashes) => HashPolicy::Validate(
Self::Verify(hashes) => {
if let Some(hashes) = hashes.get(&PackageId::from_url(url)) {
HashPolicy::Validate(hashes.as_slice())
} else {
HashPolicy::None
}
}
Self::Require(hashes) => HashPolicy::Validate(
hashes
.get(&PackageId::from_url(url))
.map(Vec::as_slice)
@@ -68,7 +95,8 @@ impl HashStrategy {
match self {
Self::None => true,
Self::Generate => true,
Self::Validate(hashes) => hashes.contains_key(&PackageId::from_registry(name.clone())),
Self::Verify(_) => true,
Self::Require(hashes) => hashes.contains_key(&PackageId::from_registry(name.clone())),
}
}
@@ -77,7 +105,8 @@ impl HashStrategy {
match self {
Self::None => true,
Self::Generate => true,
Self::Validate(hashes) => hashes.contains_key(&PackageId::from_url(url)),
Self::Verify(_) => true,
Self::Require(hashes) => hashes.contains_key(&PackageId::from_url(url)),
}
}
@@ -90,6 +119,7 @@ impl HashStrategy {
pub fn from_requirements<'a>(
requirements: impl Iterator<Item = (&'a UnresolvedRequirement, &'a [String])>,
markers: Option<&MarkerEnvironment>,
mode: HashCheckingMode,
) -> Result<Self, HashStrategyError> {
let mut hashes = FxHashMap::<PackageId, Vec<HashDigest>>::default();
@@ -103,7 +133,9 @@
// Every requirement must be either a pinned version or a direct URL.
let id = match &requirement {
UnresolvedRequirement::Named(requirement) => {
uv_requirement_to_package_id(requirement)?
Self::pin(requirement).ok_or_else(|| {
HashStrategyError::UnpinnedRequirement(requirement.to_string(), mode)
})?
}
UnresolvedRequirement::Unnamed(requirement) => {
// Direct URLs are always allowed.
@@ -111,9 +143,15 @@
}
};
// Every requirement must include a hash.
if digests.is_empty() {
return Err(HashStrategyError::MissingHashes(requirement.to_string()));
// Under `--require-hashes`, every requirement must include a hash.
if mode.is_require() {
return Err(HashStrategyError::MissingHashes(
requirement.to_string(),
mode,
));
}
continue;
}
// Parse the hashes.
@@ -125,42 +163,44 @@
hashes.insert(id, digests);
}
Ok(Self::Validate(hashes))
}
}
fn uv_requirement_to_package_id(requirement: &Requirement) -> Result<PackageId, HashStrategyError> {
Ok(match &requirement.source {
RequirementSource::Registry { specifier, .. } => {
// Must be a single specifier.
let [specifier] = specifier.as_ref() else {
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
));
};
// Must be pinned to a specific version.
if *specifier.operator() != pep440_rs::Operator::Equal {
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
));
}
PackageId::from_registry(requirement.name.clone())
match mode {
HashCheckingMode::Verify => Ok(Self::Verify(hashes)),
HashCheckingMode::Require => Ok(Self::Require(hashes)),
}
RequirementSource::Url { url, .. }
| RequirementSource::Git { url, .. }
| RequirementSource::Path { url, .. }
| RequirementSource::Directory { url, .. } => PackageId::from_url(url),
})
}
/// Pin a [`Requirement`] to a [`PackageId`], if possible.
fn pin(requirement: &Requirement) -> Option<PackageId> {
match &requirement.source {
RequirementSource::Registry { specifier, .. } => {
// Must be a single specifier.
let [specifier] = specifier.as_ref() else {
return None;
};
// Must be pinned to a specific version.
if *specifier.operator() != pep440_rs::Operator::Equal {
return None;
}
Some(PackageId::from_registry(requirement.name.clone()))
}
RequirementSource::Url { url, .. }
| RequirementSource::Git { url, .. }
| RequirementSource::Path { url, .. }
| RequirementSource::Directory { url, .. } => Some(PackageId::from_url(url)),
}
}
}
#[derive(thiserror::Error, Debug)]
pub enum HashStrategyError {
#[error(transparent)]
Hash(#[from] HashError),
#[error("In `--require-hashes` mode, all requirement must have their versions pinned with `==`, but found: {0}")]
UnpinnedRequirement(String),
#[error("In `--require-hashes` mode, all requirement must have a hash, but none were provided for: {0}")]
MissingHashes(String),
#[error(
"In `{1}` mode, all requirement must have their versions pinned with `==`, but found: {0}"
)]
UnpinnedRequirement(String, HashCheckingMode),
#[error("In `{1}` mode, all requirement must have a hash, but none were provided for: {0}")]
MissingHashes(String, HashCheckingMode),
}
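
The `pin` helper above accepts only registry requirements with a single `==` specifier (direct URLs pin trivially); anything looser yields `None`, which surfaces as the `UnpinnedRequirement` error. A simplified sketch of the version check, modeling specifiers as hypothetical (operator, version) pairs:

```rust
/// A requirement pins only if it has exactly one specifier whose operator is
/// `==` (mirroring the `pin` helper); specifiers are modeled here as
/// hypothetical (operator, version) pairs, not pep440 types.
fn is_pinned(specifiers: &[(&str, &str)]) -> bool {
    matches!(specifiers, [(op, _)] if *op == "==")
}

fn main() {
    assert!(is_pinned(&[("==", "4.0.0")])); // anyio==4.0.0
    assert!(!is_pinned(&[(">=", "4.0.0")])); // anyio>=4.0.0
    assert!(!is_pinned(&[("==", "4.0.0"), ("!=", "4.0.1")])); // multiple specifiers
    assert!(!is_pinned(&[])); // bare `anyio`
}
```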

@@ -12,8 +12,8 @@ use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, IndexStrategy, PreviewMode,
Reinstall, SetupPyStrategy, Upgrade,
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, HashCheckingMode,
IndexStrategy, PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
use uv_dispatch::BuildDispatch;
@@ -52,7 +52,7 @@ pub(crate) async fn pip_install(
reinstall: Reinstall,
link_mode: LinkMode,
compile: bool,
require_hashes: bool,
hash_checking: Option<HashCheckingMode>,
setup_py: SetupPyStrategy,
connectivity: Connectivity,
config_settings: &ConfigSettings,
@@ -226,13 +226,14 @@ pub(crate) async fn pip_install(
let (tags, markers) = resolution_environment(python_version, python_platform, interpreter)?;
// Collect the set of required hashes.
let hasher = if require_hashes {
let hasher = if let Some(hash_checking) = hash_checking {
HashStrategy::from_requirements(
requirements
.iter()
.chain(overrides.iter())
.map(|entry| (&entry.requirement, entry.hashes.as_slice())),
Some(&markers),
hash_checking,
)?
} else {
HashStrategy::None

@@ -11,8 +11,8 @@ use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, IndexStrategy, PreviewMode,
Reinstall, SetupPyStrategy, Upgrade,
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, HashCheckingMode,
IndexStrategy, PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
use uv_dispatch::BuildDispatch;
@@ -41,7 +41,7 @@ pub(crate) async fn pip_sync(
reinstall: Reinstall,
link_mode: LinkMode,
compile: bool,
require_hashes: bool,
hash_checking: Option<HashCheckingMode>,
index_locations: IndexLocations,
index_strategy: IndexStrategy,
keyring_provider: KeyringProviderType,
@@ -180,12 +180,13 @@ pub(crate) async fn pip_sync(
let (tags, markers) = resolution_environment(python_version, python_platform, interpreter)?;
// Collect the set of required hashes.
let hasher = if require_hashes {
let hasher = if let Some(hash_checking) = hash_checking {
HashStrategy::from_requirements(
requirements
.iter()
.map(|entry| (&entry.requirement, entry.hashes.as_slice())),
Some(&markers),
hash_checking,
)?
} else {
HashStrategy::None

@@ -279,7 +279,7 @@ async fn run(cli: Cli) -> Result<ExitStatus> {
args.settings.reinstall,
args.settings.link_mode,
args.settings.compile_bytecode,
args.settings.require_hashes,
args.settings.hash_checking,
args.settings.index_locations,
args.settings.index_strategy,
args.settings.keyring_provider,
@@ -361,7 +361,7 @@ async fn run(cli: Cli) -> Result<ExitStatus> {
args.settings.reinstall,
args.settings.link_mode,
args.settings.compile_bytecode,
args.settings.require_hashes,
args.settings.hash_checking,
args.settings.setup_py,
globals.connectivity,
&args.settings.config_setting,

@@ -19,9 +19,9 @@ use uv_cli::{
};
use uv_client::Connectivity;
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, IndexStrategy,
KeyringProviderType, NoBinary, NoBuild, PreviewMode, Reinstall, SetupPyStrategy, TargetTriple,
Upgrade,
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, HashCheckingMode,
IndexStrategy, KeyringProviderType, NoBinary, NoBuild, PreviewMode, Reinstall, SetupPyStrategy,
TargetTriple, Upgrade,
};
use uv_distribution::pyproject::DependencyType;
use uv_normalize::PackageName;
@@ -849,6 +849,8 @@ impl PipSyncSettings {
refresh,
require_hashes,
no_require_hashes,
verify_hashes,
no_verify_hashes,
python,
system,
no_system,
@@ -890,6 +892,7 @@ impl PipSyncSettings {
target,
prefix,
require_hashes: flag(require_hashes, no_require_hashes),
verify_hashes: flag(verify_hashes, no_verify_hashes),
no_build: flag(no_build, build),
no_binary,
only_binary,
@@ -946,6 +949,8 @@ impl PipInstallSettings {
require_hashes,
no_require_hashes,
installer,
verify_hashes,
no_verify_hashes,
python,
system,
no_system,
@@ -1017,6 +1022,7 @@ impl PipInstallSettings {
python_version,
python_platform,
require_hashes: flag(require_hashes, no_require_hashes),
verify_hashes: flag(verify_hashes, no_verify_hashes),
concurrent_builds: env(env::CONCURRENT_BUILDS),
concurrent_downloads: env(env::CONCURRENT_DOWNLOADS),
concurrent_installs: env(env::CONCURRENT_INSTALLS),
@@ -1659,7 +1665,7 @@ pub(crate) struct PipSettings {
pub(crate) annotation_style: AnnotationStyle,
pub(crate) link_mode: LinkMode,
pub(crate) compile_bytecode: bool,
pub(crate) require_hashes: bool,
pub(crate) hash_checking: Option<HashCheckingMode>,
pub(crate) upgrade: Upgrade,
pub(crate) reinstall: Reinstall,
pub(crate) concurrency: Concurrency,
@@ -1718,6 +1724,7 @@ impl PipSettings {
link_mode,
compile_bytecode,
require_hashes,
verify_hashes,
upgrade,
upgrade_package,
reinstall,
@@ -1869,10 +1876,14 @@ impl PipSettings {
.combine(emit_index_annotation)
.unwrap_or_default(),
link_mode: args.link_mode.combine(link_mode).unwrap_or_default(),
require_hashes: args
.require_hashes
.combine(require_hashes)
.unwrap_or_default(),
hash_checking: HashCheckingMode::from_args(
args.require_hashes
.combine(require_hashes)
.unwrap_or_default(),
args.verify_hashes
.combine(verify_hashes)
.unwrap_or_default(),
),
python: args.python.combine(python),
system: args.system.combine(system).unwrap_or_default(),
break_system_packages: args
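
Here the CLI flags are resolved against the configuration file: the combine step prefers the explicit command-line value, and only then does `HashCheckingMode::from_args` pick the mode (with `Require` winning over `Verify`). A small sketch of that layering, using a hypothetical `combine` helper rather than uv's `Combine` trait:

```rust
/// Hypothetical stand-in for the settings layering: the CLI value, if given,
/// wins over the configuration-file value; otherwise fall back to the default.
fn combine(cli: Option<bool>, config: Option<bool>) -> bool {
    cli.or(config).unwrap_or(false)
}

fn main() {
    // `verify-hashes = true` in uv.toml, but `--no-verify-hashes` on the CLI:
    assert!(!combine(Some(false), Some(true)));
    // `require-hashes = true` in uv.toml, nothing on the CLI:
    assert!(combine(None, Some(true)));
}
```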

@@ -5113,9 +5113,19 @@ fn require_hashes_mismatch() -> Result<()> {
// Write to a requirements file.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(
"anyio==4.0.0 --hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f",
)?;
requirements_txt.write_str(indoc::indoc! {r"
anyio==4.0.0 \
--hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
"})?;
// Raise an error.
uv_snapshot!(context.pip_install()
@@ -5127,7 +5137,17 @@
----- stdout -----
----- stderr -----
error: In `--require-hashes` mode, all requirements must be pinned upfront with `==`, but found: `idna`
Resolved 3 packages in [TIME]
error: Failed to prepare distributions
Caused by: Failed to fetch wheel: anyio==4.0.0
Caused by: Hash mismatch for `anyio==4.0.0`
Expected:
sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f
sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
Computed:
sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f
"###
);
@@ -5390,6 +5410,206 @@ fn require_hashes_override() -> Result<()> {
Ok(())
}
/// Provide valid hashes for all dependencies with `--verify-hashes`.
#[test]
fn verify_hashes() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc::indoc! {r"
anyio==4.0.0 \
--hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
"})?;
uv_snapshot!(context.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--verify-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.0.0
+ idna==3.6
+ sniffio==1.3.1
"###
);
Ok(())
}
/// Omit a pinned version with `--verify-hashes`.
#[test]
fn verify_hashes_missing_version() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc::indoc! {r"
anyio \
--hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
"})?;
// Raise an error.
uv_snapshot!(context.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--verify-hashes"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: In `--verify-hashes` mode, all requirements must have their versions pinned with `==`, but found: anyio
"###
);
Ok(())
}
/// Provide the wrong hash with `--verify-hashes`.
#[test]
fn verify_hashes_mismatch() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc::indoc! {r"
anyio==4.0.0 \
--hash=sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
"})?;
// Raise an error.
uv_snapshot!(context.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--verify-hashes"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
error: Failed to prepare distributions
Caused by: Failed to fetch wheel: anyio==4.0.0
Caused by: Hash mismatch for `anyio==4.0.0`
Expected:
sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f
sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
Computed:
sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f
"###
);
Ok(())
}
/// Omit a transitive dependency in `--verify-hashes`. This is allowed.
#[test]
fn verify_hashes_omit_dependency() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(
"anyio==4.0.0 --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f",
)?;
// Install without error: `--verify-hashes` does not require hashes for transitive
// dependencies that are absent from the requirements file.
uv_snapshot!(context.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--verify-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.0.0
+ idna==3.6
+ sniffio==1.3.1
"###
);
Ok(())
}
/// We allow `--verify-hashes` for editable dependencies.
#[test]
fn verify_hashes_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(&indoc::formatdoc! {r"
-e file://{workspace_root}/scripts/packages/black_editable[d]
",
workspace_root = context.workspace_root.simplified_display(),
})?;
// Install the editable packages.
uv_snapshot!(context.filters(), context.pip_install()
.arg("-r")
.arg(requirements_txt.path())
.arg("--verify-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 8 packages in [TIME]
Prepared 8 packages in [TIME]
Installed 8 packages in [TIME]
+ aiohttp==3.9.3
+ aiosignal==1.3.1
+ attrs==23.2.0
+ black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable)
+ frozenlist==1.4.1
+ idna==3.6
+ multidict==6.0.5
+ yarl==1.9.4
"###
);
Ok(())
}
#[test]
fn tool_uv_sources() -> Result<()> {
let context = TestContext::new("3.12");

@@ -158,7 +158,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -291,7 +291,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -425,7 +425,7 @@ fn resolve_uv_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -591,7 +591,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -703,7 +703,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -847,7 +847,7 @@ fn resolve_pyproject_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1028,7 +1028,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1208,7 +1208,7 @@ fn resolve_index_url() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1366,7 +1366,7 @@ fn resolve_find_links() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1500,7 +1500,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1672,7 +1672,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1827,7 +1827,7 @@ fn resolve_top_level() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -1961,7 +1961,7 @@ fn resolve_user_configuration() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -2078,7 +2078,7 @@ fn resolve_user_configuration() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -2195,7 +2195,7 @@ fn resolve_user_configuration() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -2314,7 +2314,7 @@ fn resolve_user_configuration() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -2458,7 +2458,7 @@ fn resolve_poetry_toml() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {
@@ -2626,7 +2626,7 @@ fn resolve_both() -> anyhow::Result<()> {
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
require_hashes: false,
hash_checking: None,
upgrade: None,
reinstall: None,
concurrency: Concurrency {

@@ -2350,6 +2350,36 @@ Accepts both standalone package names (`ruff`) and version specifiers (`ruff<0.5
---
#### [`verify-hashes`](#pip_verify-hashes) {: #pip_verify-hashes }
<span id="verify-hashes"></span>
Validate any hashes provided in the requirements file.
Unlike `--require-hashes`, `--verify-hashes` does not require that all requirements have
hashes; instead, it will limit itself to verifying the hashes of those requirements that do
include them.
**Default value**: `false`
**Type**: `bool`
**Example usage**:
=== "pyproject.toml"
```toml
[tool.uv.pip]
verify-hashes = true
```
=== "uv.toml"
```toml
[pip]
verify-hashes = true
```
---
## `workspace`
#### [`exclude`](#workspace_exclude) {: #workspace_exclude }

uv.schema.json (generated)

@@ -907,6 +907,13 @@
"items": {
"$ref": "#/definitions/Requirement"
}
},
"verify-hashes": {
"description": "Validate any hashes provided in the requirements file.\n\nUnlike `--require-hashes`, `--verify-hashes` does not require that all requirements have hashes; instead, it will limit itself to verifying the hashes of those requirements that do include them.",
"type": [
"boolean",
"null"
]
}
},
"additionalProperties": false