From 72bc739a64b6dfe27baea0d9a19fba7f2fb32e2e Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 10 Jun 2024 05:50:32 -0700 Subject: [PATCH] Remove `PubGrubPackage` dependency from `ResolutionGraph` (#4168) ## Summary Similar to how we abstracted the dependencies into `ResolutionDependencyNames`, I think it makes sense to abstract the base packages into a `ResolutionPackage`. This also avoids leaking details about the various `PubGrubPackage` enum variants to `ResolutionGraph`. --- crates/uv-resolver/src/resolution/graph.rs | 304 +++++++++------------ crates/uv-resolver/src/resolver/mod.rs | 47 +++- crates/uv/src/commands/project/lock.rs | 2 +- crates/uv/src/commands/project/sync.rs | 2 +- 4 files changed, 173 insertions(+), 182 deletions(-) diff --git a/crates/uv-resolver/src/resolution/graph.rs b/crates/uv-resolver/src/resolution/graph.rs index 19e776c83..6f2319ba5 100644 --- a/crates/uv-resolver/src/resolution/graph.rs +++ b/crates/uv-resolver/src/resolution/graph.rs @@ -16,11 +16,11 @@ use uv_git::GitResolver; use uv_normalize::{ExtraName, GroupName, PackageName}; use crate::preferences::Preferences; -use crate::pubgrub::{PubGrubDistribution, PubGrubPackageInner}; +use crate::pubgrub::PubGrubDistribution; use crate::python_requirement::PythonTarget; use crate::redirect::url_to_precise; use crate::resolution::AnnotatedDist; -use crate::resolver::Resolution; +use crate::resolver::{Resolution, ResolutionPackage}; use crate::{ InMemoryIndex, Manifest, MetadataResponse, PythonRequirement, RequiresPython, ResolveError, VersionsResponse, @@ -47,7 +47,6 @@ type NodeKey<'a> = ( impl ResolutionGraph { /// Create a new graph from the resolved PubGrub state. - #[allow(clippy::too_many_arguments)] pub(crate) fn from_state( index: &InMemoryIndex, preferences: &Preferences, @@ -55,7 +54,6 @@ impl ResolutionGraph { python: &PythonRequirement, resolution: Resolution, ) -> anyhow::Result { - // Add every package to the graph. let mut petgraph: Graph, Directed> = Graph::with_capacity(resolution.packages.len(), resolution.packages.len()); let mut inverse: FxHashMap> = FxHashMap::with_capacity_and_hasher( @@ -64,210 +62,172 @@ impl ResolutionGraph { ); let mut diagnostics = Vec::new(); + // Add every package to the graph. for (package, versions) in &resolution.packages { - match &**package { - PubGrubPackageInner::Package { - name, - extra, - dev, - marker: None, - url: None, - } => { - for version in versions { - // Create the distribution. - let dist = resolution - .pins - .get(name, version) - .expect("Every package should be pinned") - .clone(); + let ResolutionPackage { + name, + extra, + dev, + url, + } = &package; - // Track yanks for any registry distributions. - match dist.yanked() { - None | Some(Yanked::Bool(false)) => {} - Some(Yanked::Bool(true)) => { - diagnostics.push(ResolutionDiagnostic::YankedVersion { - dist: dist.clone(), - reason: None, - }); - } - Some(Yanked::Reason(reason)) => { - diagnostics.push(ResolutionDiagnostic::YankedVersion { - dist: dist.clone(), - reason: Some(reason.clone()), - }); - } - } + for version in versions { + // Map the package to a distribution. + let (dist, hashes, metadata) = if let Some(url) = url { + // Create the distribution. + let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?; - // Extract the hashes, preserving those that were already present in the - // lockfile if necessary. 
- let hashes = if let Some(digests) = preferences - .match_hashes(name, version) - .filter(|digests| !digests.is_empty()) - { - digests.to_vec() - } else if let Some(versions_response) = index.packages().get(name) { - if let VersionsResponse::Found(ref version_maps) = *versions_response { - version_maps - .iter() - .find_map(|version_map| version_map.hashes(version)) - .map(|mut digests| { - digests.sort_unstable(); - digests - }) - .unwrap_or_default() - } else { - vec![] - } + // Extract the hashes, preserving those that were already present in the + // lockfile if necessary. + let hashes = if let Some(digests) = preferences + .match_hashes(name, version) + .filter(|digests| !digests.is_empty()) + { + digests.to_vec() + } else if let Some(metadata_response) = + index.distributions().get(&dist.version_id()) + { + if let MetadataResponse::Found(ref archive) = *metadata_response { + let mut digests = archive.hashes.clone(); + digests.sort_unstable(); + digests } else { vec![] - }; + } + } else { + vec![] + }; - // Extract the metadata. - let metadata = { - let dist = PubGrubDistribution::from_registry(name, version); + // Extract the metadata. + let metadata = { + let dist = PubGrubDistribution::from_url(name, url); - let response = index - .distributions() - .get(&dist.version_id()) - .unwrap_or_else(|| { - panic!( - "Every package should have metadata: {:?}", - dist.version_id() - ) - }); - - let MetadataResponse::Found(archive) = &*response else { + let response = index + .distributions() + .get(&dist.version_id()) + .unwrap_or_else(|| { panic!( "Every package should have metadata: {:?}", dist.version_id() ) - }; + }); - archive.metadata.clone() + let MetadataResponse::Found(archive) = &*response else { + panic!( + "Every package should have metadata: {:?}", + dist.version_id() + ) }; - // Validate the extra. - if let Some(extra) = extra { - if !metadata.provides_extras.contains(extra) { - diagnostics.push(ResolutionDiagnostic::MissingExtra { - dist: dist.clone(), - extra: extra.clone(), - }); - } - } + archive.metadata.clone() + }; - // Validate the development dependency group. - if let Some(dev) = dev { - if !metadata.dev_dependencies.contains_key(dev) { - diagnostics.push(ResolutionDiagnostic::MissingDev { - dist: dist.clone(), - dev: dev.clone(), - }); - } - } + (dist.into(), hashes, metadata) + } else { + let dist = resolution + .pins + .get(name, version) + .expect("Every package should be pinned") + .clone(); - // Add the distribution to the graph. - let index = petgraph.add_node(AnnotatedDist { - dist, - extra: extra.clone(), - dev: dev.clone(), - hashes, - metadata, - }); - inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index); + // Track yanks for any registry distributions. + match dist.yanked() { + None | Some(Yanked::Bool(false)) => {} + Some(Yanked::Bool(true)) => { + diagnostics.push(ResolutionDiagnostic::YankedVersion { + dist: dist.clone(), + reason: None, + }); + } + Some(Yanked::Reason(reason)) => { + diagnostics.push(ResolutionDiagnostic::YankedVersion { + dist: dist.clone(), + reason: Some(reason.clone()), + }); + } } - } - PubGrubPackageInner::Package { - name, - extra, - dev, - marker: None, - url: Some(url), - } => { - for version in versions { - // Create the distribution. - let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?; - - // Extract the hashes, preserving those that were already present in the - // lockfile if necessary. 
- let hashes = if let Some(digests) = preferences - .match_hashes(name, version) - .filter(|digests| !digests.is_empty()) - { - digests.to_vec() - } else if let Some(metadata_response) = - index.distributions().get(&dist.version_id()) - { - if let MetadataResponse::Found(ref archive) = *metadata_response { - let mut digests = archive.hashes.clone(); - digests.sort_unstable(); - digests - } else { - vec![] - } + // Extract the hashes, preserving those that were already present in the + // lockfile if necessary. + let hashes = if let Some(digests) = preferences + .match_hashes(name, version) + .filter(|digests| !digests.is_empty()) + { + digests.to_vec() + } else if let Some(versions_response) = index.packages().get(name) { + if let VersionsResponse::Found(ref version_maps) = *versions_response { + version_maps + .iter() + .find_map(|version_map| version_map.hashes(version)) + .map(|mut digests| { + digests.sort_unstable(); + digests + }) + .unwrap_or_default() } else { vec![] - }; + } + } else { + vec![] + }; - // Extract the metadata. - let metadata = { - let dist = PubGrubDistribution::from_url(name, url); + // Extract the metadata. + let metadata = { + let dist = PubGrubDistribution::from_registry(name, version); - let response = index - .distributions() - .get(&dist.version_id()) - .unwrap_or_else(|| { - panic!( - "Every package should have metadata: {:?}", - dist.version_id() - ) - }); - - let MetadataResponse::Found(archive) = &*response else { + let response = index + .distributions() + .get(&dist.version_id()) + .unwrap_or_else(|| { panic!( "Every package should have metadata: {:?}", dist.version_id() ) - }; + }); - archive.metadata.clone() + let MetadataResponse::Found(archive) = &*response else { + panic!( + "Every package should have metadata: {:?}", + dist.version_id() + ) }; - // Validate the extra. - if let Some(extra) = extra { - if !metadata.provides_extras.contains(extra) { - diagnostics.push(ResolutionDiagnostic::MissingExtra { - dist: dist.clone().into(), - extra: extra.clone(), - }); - } - } + archive.metadata.clone() + }; - // Validate the development dependency group. - if let Some(dev) = dev { - if !metadata.dev_dependencies.contains_key(dev) { - diagnostics.push(ResolutionDiagnostic::MissingDev { - dist: dist.clone().into(), - dev: dev.clone(), - }); - } - } + (dist, hashes, metadata) + }; - // Add the distribution to the graph. - let index = petgraph.add_node(AnnotatedDist { - dist: dist.into(), + // Validate the extra. + if let Some(extra) = extra { + if !metadata.provides_extras.contains(extra) { + diagnostics.push(ResolutionDiagnostic::MissingExtra { + dist: dist.clone(), extra: extra.clone(), - dev: dev.clone(), - hashes, - metadata, }); - inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index); } } - _ => {} - }; + // Validate the development dependency group. + if let Some(dev) = dev { + if !metadata.dev_dependencies.contains_key(dev) { + diagnostics.push(ResolutionDiagnostic::MissingDev { + dist: dist.clone(), + dev: dev.clone(), + }); + } + } + + // Add the distribution to the graph. + let index = petgraph.add_node(AnnotatedDist { + dist, + extra: extra.clone(), + dev: dev.clone(), + hashes, + metadata, + }); + inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index); + } } // Add every edge to the graph. 
diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index ec4636ef6..85f23fc1c 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -28,7 +28,7 @@ pub(crate) use locals::Locals; use pep440_rs::{Version, MIN_VERSION}; use pep508_rs::{MarkerEnvironment, MarkerTree}; use platform_tags::Tags; -use pypi_types::{Metadata23, Requirement}; +use pypi_types::{Metadata23, Requirement, VerbatimParsedUrl}; pub(crate) use urls::Urls; use uv_configuration::{Constraints, Overrides}; use uv_distribution::{ArchiveMetadata, DistributionDatabase}; @@ -529,7 +529,7 @@ impl ResolverState Resolution { - let packages = self.pubgrub.partial_solution.extract_solution(); + let solution = self.pubgrub.partial_solution.extract_solution(); let mut dependencies: FxHashMap< ResolutionDependencyNames, FxHashSet, > = FxHashMap::default(); - for (package, self_version) in &packages { + for (package, self_version) in &solution { for id in &self.pubgrub.incompatibilities[package] { let pubgrub::solver::Kind::FromDependencyOf( ref self_package, @@ -1528,7 +1528,7 @@ impl SolveState { if !self_range.contains(self_version) { continue; } - let Some(dependency_version) = packages.get(dependency_package) else { + let Some(dependency_version) = solution.get(dependency_package) else { continue; }; if !dependency_range.contains(dependency_version) { @@ -1649,10 +1649,33 @@ impl SolveState { } } } - let packages = packages + + let packages = solution .into_iter() - .map(|(package, version)| (package, FxHashSet::from_iter([version]))) + .filter_map(|(package, version)| { + if let PubGrubPackageInner::Package { + name, + extra, + dev, + url, + marker: None, + } = &*package + { + Some(( + ResolutionPackage { + name: name.clone(), + extra: extra.clone(), + dev: dev.clone(), + url: url.clone(), + }, + FxHashSet::from_iter([version]), + )) + } else { + None + } + }) .collect(); + Resolution { packages, dependencies, @@ -1663,12 +1686,20 @@ impl SolveState { #[derive(Debug, Default)] pub(crate) struct Resolution { - pub(crate) packages: FxHashMap>, + pub(crate) packages: FxHashMap>, pub(crate) dependencies: FxHashMap>, pub(crate) pins: FilePins, } +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub(crate) struct ResolutionPackage { + pub(crate) name: PackageName, + pub(crate) extra: Option, + pub(crate) dev: Option, + pub(crate) url: Option, +} + #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub(crate) struct ResolutionDependencyNames { pub(crate) from: PackageName, diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 1c1950ef5..20758f369 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -166,7 +166,7 @@ pub(super) async fn do_lock( let flat_index = { let client = FlatIndexClient::new(&client, cache); let entries = client.fetch(index_locations.flat_index()).await?; - FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary) + FlatIndex::from_entries(entries, None, &hasher, &no_build, &no_binary) }; // If an existing lockfile exists, build up a set of preferences. 
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 9e5750390..9ab29088b 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -130,7 +130,7 @@ pub(super) async fn do_sync( let flat_index = { let client = FlatIndexClient::new(&client, cache); let entries = client.fetch(index_locations.flat_index()).await?; - FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary) + FlatIndex::from_entries(entries, Some(tags), &hasher, &no_build, &no_binary) }; // Create a build dispatch.
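
Note (an editor's reading, not part of the patch): the two one-line hunks in `lock.rs` and `sync.rs` change the second argument of `FlatIndex::from_entries` from `tags` to `None` and `Some(tags)` respectively, which suggests the parameter is now optional. Presumably `do_lock` omits platform tags because a lockfile is resolved platform-independently, while `do_sync` still selects wheels for the current interpreter; the PR summary does not discuss this, so treat that rationale as an inference from the diff rather than a stated design decision.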