Enforce hashes in lockfile install (#5170)

## Summary

Hashes will be validated when present, but are not required, since some sources do not provide them (e.g., certain registries, as well as Git dependencies).

Closes https://github.com/astral-sh/uv/issues/5168.
This commit is contained in:
Charlie Marsh 2024-07-17 19:10:37 -04:00 committed by GitHub
parent 218ae2c13e
commit 6a49dba30c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 148 additions and 12 deletions

View file

@ -1,6 +1,6 @@
use std::collections::BTreeMap;
use pypi_types::{Requirement, RequirementSource};
use pypi_types::{HashDigest, Requirement, RequirementSource};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist};
@ -9,6 +9,7 @@ use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist};
/// The result of a resolution: the pinned distribution for each package,
/// plus any hashes and diagnostics gathered while resolving.
#[derive(Debug, Default, Clone)]
pub struct Resolution {
/// The selected distribution for each package in the resolution.
packages: BTreeMap<PackageName, ResolvedDist>,
/// Hash digests recorded per package. A package may have no entry here
/// (e.g., Git dependencies, or registries that omit hashes).
hashes: BTreeMap<PackageName, Vec<HashDigest>>,
/// Diagnostics produced while building the resolution.
diagnostics: Vec<ResolutionDiagnostic>,
}
@ -16,10 +17,12 @@ impl Resolution {
/// Create a new resolution from the given pinned packages, their recorded
/// hashes, and any diagnostics produced during resolution.
pub fn new(
    packages: BTreeMap<PackageName, ResolvedDist>,
    hashes: BTreeMap<PackageName, Vec<HashDigest>>,
    diagnostics: Vec<ResolutionDiagnostic>,
) -> Self {
    Self { packages, hashes, diagnostics }
}
@ -32,6 +35,11 @@ impl Resolution {
}
}
/// Return the hashes for the given package name, if they exist.
///
/// Returns an empty slice when no hashes were recorded for the package.
pub fn get_hashes(&self, package_name: &PackageName) -> &[HashDigest] {
    match self.hashes.get(package_name) {
        Some(digests) => digests.as_slice(),
        None => &[],
    }
}
/// Iterate over the [`PackageName`] entities in this resolution.
pub fn packages(&self) -> impl Iterator<Item = &PackageName> {
self.packages.keys()

View file

@ -382,6 +382,7 @@ impl Lock {
}
let mut map = BTreeMap::default();
let mut hashes = BTreeMap::default();
while let Some((dist, extra)) = queue.pop_front() {
let deps =
if let Some(extra) = extra {
@ -406,13 +407,14 @@ impl Lock {
}
}
}
let name = dist.id.name.clone();
let resolved_dist =
ResolvedDist::Installable(dist.to_dist(project.workspace().install_path(), tags)?);
map.insert(name, resolved_dist);
map.insert(
dist.id.name.clone(),
ResolvedDist::Installable(dist.to_dist(project.workspace().install_path(), tags)?),
);
hashes.insert(dist.id.name.clone(), dist.hashes());
}
let diagnostics = vec![];
Ok(Resolution::new(map, diagnostics))
Ok(Resolution::new(map, hashes, diagnostics))
}
/// Returns the TOML representation of this lock file.

View file

@ -489,6 +489,10 @@ impl From<ResolutionGraph> for distribution_types::Resolution {
.dists()
.map(|node| (node.name().clone(), node.dist.clone()))
.collect(),
graph
.dists()
.map(|node| (node.name().clone(), node.hashes.clone()))
.collect(),
graph.diagnostics,
)
}

View file

@ -3,7 +3,9 @@ use std::str::FromStr;
use std::sync::Arc;
use url::Url;
use distribution_types::{DistributionMetadata, HashPolicy, UnresolvedRequirement, VersionId};
use distribution_types::{
DistributionMetadata, HashPolicy, Name, Resolution, UnresolvedRequirement, VersionId,
};
use pep440_rs::Version;
use pep508_rs::MarkerEnvironment;
use pypi_types::{HashDigest, HashError, Requirement, RequirementSource};
@ -168,10 +170,37 @@ impl HashStrategy {
hashes.insert(id, digests);
}
let hashes = Arc::new(hashes);
match mode {
HashCheckingMode::Verify => Ok(Self::Verify(hashes)),
HashCheckingMode::Require => Ok(Self::Require(hashes)),
HashCheckingMode::Verify => Ok(Self::Verify(Arc::new(hashes))),
HashCheckingMode::Require => Ok(Self::Require(Arc::new(hashes))),
}
}
/// Generate the required hashes from a [`Resolution`].
///
/// Collects the recorded digests for every distribution in the resolution.
/// Under `--require-hashes`, a distribution without any recorded hashes is
/// an error; otherwise it is simply skipped.
pub fn from_resolution(
    resolution: &Resolution,
    mode: HashCheckingMode,
) -> Result<Self, HashStrategyError> {
    let mut hashes = FxHashMap::<VersionId, Vec<HashDigest>>::default();
    for dist in resolution.distributions() {
        let digests = resolution.get_hashes(dist.name());
        if !digests.is_empty() {
            hashes.insert(dist.version_id(), digests.to_vec());
        } else if mode.is_require() {
            // Under `--require-hashes`, every requirement must include a hash.
            return Err(HashStrategyError::MissingHashes(
                dist.name().to_string(),
                mode,
            ));
        }
    }
    match mode {
        HashCheckingMode::Verify => Ok(Self::Verify(Arc::new(hashes))),
        HashCheckingMode::Require => Ok(Self::Require(Arc::new(hashes))),
    }
}

View file

@ -63,6 +63,9 @@ pub(crate) enum ProjectError {
#[error(transparent)]
Virtualenv(#[from] uv_virtualenv::Error),
#[error(transparent)]
HashStrategy(#[from] uv_types::HashStrategyError),
#[error(transparent)]
Tags(#[from] platform_tags::TagsError),

View file

@ -2,7 +2,9 @@ use anyhow::Result;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, SetupPyStrategy};
use uv_configuration::{
Concurrency, ExtrasSpecification, HashCheckingMode, PreviewMode, SetupPyStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::{VirtualProject, DEV_DEPENDENCIES};
use uv_installer::SitePackages;
@ -230,9 +232,11 @@ pub(super) async fn do_sync(
// optional on the downstream APIs.
let build_isolation = BuildIsolation::default();
let dry_run = false;
let hasher = HashStrategy::default();
let setup_py = SetupPyStrategy::default();
// Extract the hashes from the lockfile.
let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(&client, cache);

View file

@ -3920,3 +3920,89 @@ fn lock_new_extras() -> Result<()> {
Ok(())
}
/// Ensure that the installer rejects invalid hashes from the lockfile.
///
/// In this case, the hashes for `idna` have all been incremented by one in the left-most digit.
#[test]
fn lock_invalid_hash() -> Result<()> {
let context = TestContext::new("3.12");
// A minimal project whose sole dependency (`anyio`) transitively pulls in
// `idna` and `sniffio`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
"#,
)?;
// Hand-write the lockfile. The `idna` hashes below are deliberately
// corrupted (left-most digit bumped), so installing from this lockfile
// must fail hash validation.
let lock = context.temp_dir.child("uv.lock");
lock.write_str(r#"
version = 1
requires-python = ">=3.12"
[[distribution]]
name = "anyio"
version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/b3/fefbf7e78ab3b805dec67d698dc18dd505af7a18a8dd08868c9b4fa736b5/anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce", size = 142737 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/fe/7ce1926952c8a403b35029e194555558514b365ad77d75125f521a2bec62/anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0", size = 80873 },
]
[[distribution]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:aecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:d05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567 },
]
[[distribution]]
name = "project"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "anyio" },
]
[[distribution]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
]
"#)?;
// Install from the lockfile.
// `--frozen` installs from the lockfile as-is; the corrupted `idna` hash
// should surface as a hash-mismatch error rather than a successful sync.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: `uv sync` is experimental and may change without warning.
error: Failed to prepare distributions
Caused by: Failed to fetch wheel: idna==3.6
Caused by: Hash mismatch for `idna==3.6`
Expected:
sha256:aecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca
sha256:d05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
Computed:
sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
"###);
Ok(())
}