Remove PubGrubPackage dependency from ResolutionGraph (#4168)

## Summary

Similar to how we abstracted the dependencies into
`ResolutionDependencyNames`, I think it makes sense to abstract the base
packages into a `ResolutionPackage`. This also avoids leaking details
about the various `PubGrubPackage` enum variants to `ResolutionGraph`.
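
For illustration, here is the shape of the change in miniature: a minimal, self-contained sketch of `ResolutionPackage` and the `filter_map` conversion performed in `SolveState::into_resolution` (see the diff below). The type aliases, the simplified `PubGrubPackageInner`, and the helper name `to_resolution_packages` are stand-ins invented for this sketch, not uv's real definitions, so it compiles on its own rather than against the crate:

```rust
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

// Stand-ins for uv's real types (e.g. `pypi_types::VerbatimParsedUrl`,
// `uv_normalize::PackageName`); plain aliases keep the sketch self-contained.
type PackageName = String;
type ExtraName = String;
type GroupName = String;
type VerbatimParsedUrl = String;
type Version = String;

// A simplified stand-in for uv's internal `PubGrubPackageInner`: one concrete
// `Package` variant plus a placeholder for the proxy variants (root, markers, ...).
enum PubGrubPackageInner {
    Root,
    Package {
        name: PackageName,
        extra: Option<ExtraName>,
        dev: Option<GroupName>,
        marker: Option<String>,
        url: Option<VerbatimParsedUrl>,
    },
}

/// The PubGrub-free key that `Resolution` now exposes to `ResolutionGraph`.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ResolutionPackage {
    name: PackageName,
    extra: Option<ExtraName>,
    dev: Option<GroupName>,
    url: Option<VerbatimParsedUrl>,
}

/// Mirrors the `filter_map` in the diff: keep only concrete, marker-free
/// packages, dropping every proxy variant at the resolver boundary.
fn to_resolution_packages(
    solution: Vec<(Arc<PubGrubPackageInner>, Version)>,
) -> HashMap<ResolutionPackage, HashSet<Version>> {
    solution
        .into_iter()
        .filter_map(|(package, version)| {
            if let PubGrubPackageInner::Package {
                name,
                extra,
                dev,
                url,
                marker: None,
            } = &*package
            {
                Some((
                    ResolutionPackage {
                        name: name.clone(),
                        extra: extra.clone(),
                        dev: dev.clone(),
                        url: url.clone(),
                    },
                    HashSet::from([version]),
                ))
            } else {
                None
            }
        })
        .collect()
}

fn main() {
    let solution = vec![
        (
            Arc::new(PubGrubPackageInner::Package {
                name: "anyio".to_string(),
                extra: None,
                dev: None,
                marker: None,
                url: None,
            }),
            "4.3.0".to_string(),
        ),
        // The root package is a proxy variant and gets filtered out.
        (Arc::new(PubGrubPackageInner::Root), "0".to_string()),
    ];
    println!("{:?}", to_resolution_packages(solution));
}
```

Keying `Resolution::packages` on this struct means graph construction only ever sees concrete, marker-free packages; proxy variants such as the root never cross the boundary.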
Charlie Marsh, 2024-06-10 05:50:32 -07:00 (committed by GitHub)
parent 18b40b0c7d
commit 72bc739a64
4 changed files with 173 additions and 182 deletions


@@ -16,11 +16,11 @@ use uv_git::GitResolver;
 use uv_normalize::{ExtraName, GroupName, PackageName};

 use crate::preferences::Preferences;
-use crate::pubgrub::{PubGrubDistribution, PubGrubPackageInner};
+use crate::pubgrub::PubGrubDistribution;
 use crate::python_requirement::PythonTarget;
 use crate::redirect::url_to_precise;
 use crate::resolution::AnnotatedDist;
-use crate::resolver::Resolution;
+use crate::resolver::{Resolution, ResolutionPackage};
 use crate::{
     InMemoryIndex, Manifest, MetadataResponse, PythonRequirement, RequiresPython, ResolveError,
     VersionsResponse,
@@ -47,7 +47,6 @@ type NodeKey<'a> = (
 impl ResolutionGraph {
     /// Create a new graph from the resolved PubGrub state.
     #[allow(clippy::too_many_arguments)]
     pub(crate) fn from_state(
         index: &InMemoryIndex,
         preferences: &Preferences,
@@ -55,7 +54,6 @@ impl ResolutionGraph {
         python: &PythonRequirement,
         resolution: Resolution,
     ) -> anyhow::Result<Self, ResolveError> {
-        // Add every package to the graph.
         let mut petgraph: Graph<AnnotatedDist, Option<MarkerTree>, Directed> =
             Graph::with_capacity(resolution.packages.len(), resolution.packages.len());
         let mut inverse: FxHashMap<NodeKey, NodeIndex<u32>> = FxHashMap::with_capacity_and_hasher(
@@ -64,210 +62,172 @@ impl ResolutionGraph {
         );
         let mut diagnostics = Vec::new();

+        // Add every package to the graph.
         for (package, versions) in &resolution.packages {
-            match &**package {
-                PubGrubPackageInner::Package {
-                    name,
-                    extra,
-                    dev,
-                    marker: None,
-                    url: None,
-                } => {
-                    for version in versions {
-                        // Create the distribution.
-                        let dist = resolution
-                            .pins
-                            .get(name, version)
-                            .expect("Every package should be pinned")
-                            .clone();
-
-                        // Track yanks for any registry distributions.
-                        match dist.yanked() {
-                            None | Some(Yanked::Bool(false)) => {}
-                            Some(Yanked::Bool(true)) => {
-                                diagnostics.push(ResolutionDiagnostic::YankedVersion {
-                                    dist: dist.clone(),
-                                    reason: None,
-                                });
-                            }
-                            Some(Yanked::Reason(reason)) => {
-                                diagnostics.push(ResolutionDiagnostic::YankedVersion {
-                                    dist: dist.clone(),
-                                    reason: Some(reason.clone()),
-                                });
-                            }
-                        }
-
-                        // Extract the hashes, preserving those that were already present in the
-                        // lockfile if necessary.
-                        let hashes = if let Some(digests) = preferences
-                            .match_hashes(name, version)
-                            .filter(|digests| !digests.is_empty())
-                        {
-                            digests.to_vec()
-                        } else if let Some(versions_response) = index.packages().get(name) {
-                            if let VersionsResponse::Found(ref version_maps) = *versions_response {
-                                version_maps
-                                    .iter()
-                                    .find_map(|version_map| version_map.hashes(version))
-                                    .map(|mut digests| {
-                                        digests.sort_unstable();
-                                        digests
-                                    })
-                                    .unwrap_or_default()
-                            } else {
-                                vec![]
-                            }
-                        } else {
-                            vec![]
-                        };
-
-                        // Extract the metadata.
-                        let metadata = {
-                            let dist = PubGrubDistribution::from_registry(name, version);
-
-                            let response = index
-                                .distributions()
-                                .get(&dist.version_id())
-                                .unwrap_or_else(|| {
-                                    panic!(
-                                        "Every package should have metadata: {:?}",
-                                        dist.version_id()
-                                    )
-                                });
-
-                            let MetadataResponse::Found(archive) = &*response else {
-                                panic!(
-                                    "Every package should have metadata: {:?}",
-                                    dist.version_id()
-                                )
-                            };
-
-                            archive.metadata.clone()
-                        };
-
-                        // Validate the extra.
-                        if let Some(extra) = extra {
-                            if !metadata.provides_extras.contains(extra) {
-                                diagnostics.push(ResolutionDiagnostic::MissingExtra {
-                                    dist: dist.clone(),
-                                    extra: extra.clone(),
-                                });
-                            }
-                        }
-
-                        // Validate the development dependency group.
-                        if let Some(dev) = dev {
-                            if !metadata.dev_dependencies.contains_key(dev) {
-                                diagnostics.push(ResolutionDiagnostic::MissingDev {
-                                    dist: dist.clone(),
-                                    dev: dev.clone(),
-                                });
-                            }
-                        }
-
-                        // Add the distribution to the graph.
-                        let index = petgraph.add_node(AnnotatedDist {
-                            dist,
-                            extra: extra.clone(),
-                            dev: dev.clone(),
-                            hashes,
-                            metadata,
-                        });
-                        inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index);
-                    }
-                }
-                PubGrubPackageInner::Package {
-                    name,
-                    extra,
-                    dev,
-                    marker: None,
-                    url: Some(url),
-                } => {
-                    for version in versions {
-                        // Create the distribution.
-                        let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?;
-
-                        // Extract the hashes, preserving those that were already present in the
-                        // lockfile if necessary.
-                        let hashes = if let Some(digests) = preferences
-                            .match_hashes(name, version)
-                            .filter(|digests| !digests.is_empty())
-                        {
-                            digests.to_vec()
-                        } else if let Some(metadata_response) =
-                            index.distributions().get(&dist.version_id())
-                        {
-                            if let MetadataResponse::Found(ref archive) = *metadata_response {
-                                let mut digests = archive.hashes.clone();
-                                digests.sort_unstable();
-                                digests
-                            } else {
-                                vec![]
-                            }
-                        } else {
-                            vec![]
-                        };
-
-                        // Extract the metadata.
-                        let metadata = {
-                            let dist = PubGrubDistribution::from_url(name, url);
-
-                            let response = index
-                                .distributions()
-                                .get(&dist.version_id())
-                                .unwrap_or_else(|| {
-                                    panic!(
-                                        "Every package should have metadata: {:?}",
-                                        dist.version_id()
-                                    )
-                                });
-
-                            let MetadataResponse::Found(archive) = &*response else {
-                                panic!(
-                                    "Every package should have metadata: {:?}",
-                                    dist.version_id()
-                                )
-                            };
-
-                            archive.metadata.clone()
-                        };
-
-                        // Validate the extra.
-                        if let Some(extra) = extra {
-                            if !metadata.provides_extras.contains(extra) {
-                                diagnostics.push(ResolutionDiagnostic::MissingExtra {
-                                    dist: dist.clone().into(),
-                                    extra: extra.clone(),
-                                });
-                            }
-                        }
-
-                        // Validate the development dependency group.
-                        if let Some(dev) = dev {
-                            if !metadata.dev_dependencies.contains_key(dev) {
-                                diagnostics.push(ResolutionDiagnostic::MissingDev {
-                                    dist: dist.clone().into(),
-                                    dev: dev.clone(),
-                                });
-                            }
-                        }
-
-                        // Add the distribution to the graph.
-                        let index = petgraph.add_node(AnnotatedDist {
-                            dist: dist.into(),
-                            extra: extra.clone(),
-                            dev: dev.clone(),
-                            hashes,
-                            metadata,
-                        });
-                        inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index);
-                    }
-                }
-                _ => {}
-            };
+            let ResolutionPackage {
+                name,
+                extra,
+                dev,
+                url,
+            } = &package;
+
+            for version in versions {
+                // Map the package to a distribution.
+                let (dist, hashes, metadata) = if let Some(url) = url {
+                    // Create the distribution.
+                    let dist = Dist::from_url(name.clone(), url_to_precise(url.clone(), git))?;
+
+                    // Extract the hashes, preserving those that were already present in the
+                    // lockfile if necessary.
+                    let hashes = if let Some(digests) = preferences
+                        .match_hashes(name, version)
+                        .filter(|digests| !digests.is_empty())
+                    {
+                        digests.to_vec()
+                    } else if let Some(metadata_response) =
+                        index.distributions().get(&dist.version_id())
+                    {
+                        if let MetadataResponse::Found(ref archive) = *metadata_response {
+                            let mut digests = archive.hashes.clone();
+                            digests.sort_unstable();
+                            digests
+                        } else {
+                            vec![]
+                        }
+                    } else {
+                        vec![]
+                    };
+
+                    // Extract the metadata.
+                    let metadata = {
+                        let dist = PubGrubDistribution::from_url(name, url);
+
+                        let response = index
+                            .distributions()
+                            .get(&dist.version_id())
+                            .unwrap_or_else(|| {
+                                panic!(
+                                    "Every package should have metadata: {:?}",
+                                    dist.version_id()
+                                )
+                            });
+
+                        let MetadataResponse::Found(archive) = &*response else {
+                            panic!(
+                                "Every package should have metadata: {:?}",
+                                dist.version_id()
+                            )
+                        };
+
+                        archive.metadata.clone()
+                    };
+
+                    (dist.into(), hashes, metadata)
+                } else {
+                    let dist = resolution
+                        .pins
+                        .get(name, version)
+                        .expect("Every package should be pinned")
+                        .clone();
+
+                    // Track yanks for any registry distributions.
+                    match dist.yanked() {
+                        None | Some(Yanked::Bool(false)) => {}
+                        Some(Yanked::Bool(true)) => {
+                            diagnostics.push(ResolutionDiagnostic::YankedVersion {
+                                dist: dist.clone(),
+                                reason: None,
+                            });
+                        }
+                        Some(Yanked::Reason(reason)) => {
+                            diagnostics.push(ResolutionDiagnostic::YankedVersion {
+                                dist: dist.clone(),
+                                reason: Some(reason.clone()),
+                            });
+                        }
+                    }
+
+                    // Extract the hashes, preserving those that were already present in the
+                    // lockfile if necessary.
+                    let hashes = if let Some(digests) = preferences
+                        .match_hashes(name, version)
+                        .filter(|digests| !digests.is_empty())
+                    {
+                        digests.to_vec()
+                    } else if let Some(versions_response) = index.packages().get(name) {
+                        if let VersionsResponse::Found(ref version_maps) = *versions_response {
+                            version_maps
+                                .iter()
+                                .find_map(|version_map| version_map.hashes(version))
+                                .map(|mut digests| {
+                                    digests.sort_unstable();
+                                    digests
+                                })
+                                .unwrap_or_default()
+                        } else {
+                            vec![]
+                        }
+                    } else {
+                        vec![]
+                    };
+
+                    // Extract the metadata.
+                    let metadata = {
+                        let dist = PubGrubDistribution::from_registry(name, version);
+
+                        let response = index
+                            .distributions()
+                            .get(&dist.version_id())
+                            .unwrap_or_else(|| {
+                                panic!(
+                                    "Every package should have metadata: {:?}",
+                                    dist.version_id()
+                                )
+                            });
+
+                        let MetadataResponse::Found(archive) = &*response else {
+                            panic!(
+                                "Every package should have metadata: {:?}",
+                                dist.version_id()
+                            )
+                        };
+
+                        archive.metadata.clone()
+                    };
+
+                    (dist, hashes, metadata)
+                };
+
+                // Validate the extra.
+                if let Some(extra) = extra {
+                    if !metadata.provides_extras.contains(extra) {
+                        diagnostics.push(ResolutionDiagnostic::MissingExtra {
+                            dist: dist.clone(),
+                            extra: extra.clone(),
+                        });
+                    }
+                }

+                // Validate the development dependency group.
+                if let Some(dev) = dev {
+                    if !metadata.dev_dependencies.contains_key(dev) {
+                        diagnostics.push(ResolutionDiagnostic::MissingDev {
+                            dist: dist.clone(),
+                            dev: dev.clone(),
+                        });
+                    }
+                }
+
+                // Add the distribution to the graph.
+                let index = petgraph.add_node(AnnotatedDist {
+                    dist,
+                    extra: extra.clone(),
+                    dev: dev.clone(),
+                    hashes,
+                    metadata,
+                });
+                inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index);
+            }
         }

         // Add every edge to the graph.


@@ -28,7 +28,7 @@ pub(crate) use locals::Locals;
 use pep440_rs::{Version, MIN_VERSION};
 use pep508_rs::{MarkerEnvironment, MarkerTree};
 use platform_tags::Tags;
-use pypi_types::{Metadata23, Requirement};
+use pypi_types::{Metadata23, Requirement, VerbatimParsedUrl};
 pub(crate) use urls::Urls;
 use uv_configuration::{Constraints, Overrides};
 use uv_distribution::{ArchiveMetadata, DistributionDatabase};
@@ -529,7 +529,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackages>
                 // that our resolver state is only cloned as much
                 // as it needs to be. We basically move the state
                 // into `forked_states`, and then only clone it if
-                // there it at least one more fork to visit.
+                // there is at least one more fork to visit.
                 let mut cur_state = Some(state);
                 let forks_len = forks.len();
                 for (i, fork) in forks.into_iter().enumerate() {
@@ -1506,12 +1506,12 @@ struct SolveState {
 impl SolveState {
     fn into_resolution(self) -> Resolution {
-        let packages = self.pubgrub.partial_solution.extract_solution();
+        let solution = self.pubgrub.partial_solution.extract_solution();
         let mut dependencies: FxHashMap<
             ResolutionDependencyNames,
             FxHashSet<ResolutionDependencyVersions>,
         > = FxHashMap::default();
-        for (package, self_version) in &packages {
+        for (package, self_version) in &solution {
             for id in &self.pubgrub.incompatibilities[package] {
                 let pubgrub::solver::Kind::FromDependencyOf(
                     ref self_package,
@@ -1528,7 +1528,7 @@ impl SolveState {
                 if !self_range.contains(self_version) {
                     continue;
                 }
-                let Some(dependency_version) = packages.get(dependency_package) else {
+                let Some(dependency_version) = solution.get(dependency_package) else {
                     continue;
                 };
                 if !dependency_range.contains(dependency_version) {
@@ -1649,10 +1649,33 @@ impl SolveState {
                 }
             }
         }
-        let packages = packages
+        let packages = solution
             .into_iter()
-            .map(|(package, version)| (package, FxHashSet::from_iter([version])))
+            .filter_map(|(package, version)| {
+                if let PubGrubPackageInner::Package {
+                    name,
+                    extra,
+                    dev,
+                    url,
+                    marker: None,
+                } = &*package
+                {
+                    Some((
+                        ResolutionPackage {
+                            name: name.clone(),
+                            extra: extra.clone(),
+                            dev: dev.clone(),
+                            url: url.clone(),
+                        },
+                        FxHashSet::from_iter([version]),
+                    ))
+                } else {
+                    None
+                }
+            })
             .collect();
+
         Resolution {
             packages,
             dependencies,
@@ -1663,12 +1686,20 @@ impl SolveState {
 #[derive(Debug, Default)]
 pub(crate) struct Resolution {
-    pub(crate) packages: FxHashMap<PubGrubPackage, FxHashSet<Version>>,
+    pub(crate) packages: FxHashMap<ResolutionPackage, FxHashSet<Version>>,
     pub(crate) dependencies:
         FxHashMap<ResolutionDependencyNames, FxHashSet<ResolutionDependencyVersions>>,
     pub(crate) pins: FilePins,
 }

+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub(crate) struct ResolutionPackage {
+    pub(crate) name: PackageName,
+    pub(crate) extra: Option<ExtraName>,
+    pub(crate) dev: Option<GroupName>,
+    pub(crate) url: Option<VerbatimParsedUrl>,
+}
+
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub(crate) struct ResolutionDependencyNames {
     pub(crate) from: PackageName,


@@ -166,7 +166,7 @@ pub(super) async fn do_lock(
     let flat_index = {
         let client = FlatIndexClient::new(&client, cache);
         let entries = client.fetch(index_locations.flat_index()).await?;
-        FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
+        FlatIndex::from_entries(entries, None, &hasher, &no_build, &no_binary)
     };

     // If an existing lockfile exists, build up a set of preferences.


@@ -130,7 +130,7 @@ pub(super) async fn do_sync(
     let flat_index = {
         let client = FlatIndexClient::new(&client, cache);
         let entries = client.fetch(index_locations.flat_index()).await?;
-        FlatIndex::from_entries(entries, tags, &hasher, &no_build, &no_binary)
+        FlatIndex::from_entries(entries, Some(tags), &hasher, &no_build, &no_binary)
     };

     // Create a build dispatch.