Mirror of https://github.com/astral-sh/uv.git (synced 2025-10-02 06:51:14 +00:00)
uv-resolver: implement merging of forked resolutions
This commit is a pretty invasive change that implements the merging of resolutions created by each fork of the resolver. The main idea here is that each `SolveState` is converted into a `Resolution` (a new type) and stored on the heap after its fork completes. When all forks complete, they are all merged into a single `Resolution`. This `Resolution` is then used to build a `ResolutionGraph`.

Construction of `ResolutionGraph` mostly stays the same (despite the gnarly diff due to an indent change) with one exception: the code to extract dependency edges out of PubGrub's state has been moved to `SolveState::into_resolution`. The idea here is that once a fork completes, we extract what we need from the PubGrub state and then throw it away. We store these edges in our own intermediate type which is then converted into petgraph edges in the `ResolutionGraph` constructor.

One interesting change we make here is that our edge data is now a `Version` instead of a `Range<Version>`. I don't think `Range<Version>` was actually being used anywhere, so this seems okay? In any case, I think `Version` here is correct because a resolution corresponds to specific dependencies of each package. Moreover, I didn't see an easy way to make things work with `Range<Version>`.

Notably, since we no longer have the guarantee that there is only one version of each package, we need to use `(PackageName, Version)` instead of just `PackageName` for inverted lookups in `ResolutionGraph::from_state`.

Finally, the main resolver loop itself is changed a bit to track all forked resolutions and then merge them at the end.

Note that we don't really have any dealings with markers in this commit. We'll get to that in a subsequent commit.
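To make the merging step concrete, here is a minimal sketch of the idea. It uses simplified stand-in types (plain `String` keys and `std` collections) rather than uv's actual `PubGrubPackage`/`FxHashMap` machinery, and the package and version strings are purely illustrative; only the `Resolution`/`union` names mirror the diff below.

    use std::collections::{HashMap, HashSet};

    // Simplified stand-in for a per-fork resolution: each fork pins a set of
    // versions per package name. (Illustrative only; the real type also
    // carries dependency edges and file pins.)
    #[derive(Debug, Default)]
    struct Resolution {
        packages: HashMap<String, HashSet<String>>,
    }

    impl Resolution {
        // Merge another fork's resolution into this one by unioning the
        // version sets, so a package may end up with more than one version.
        fn union(&mut self, other: Resolution) {
            for (package, versions) in other.packages {
                self.packages.entry(package).or_default().extend(versions);
            }
        }
    }

    fn main() {
        // Two hypothetical forks that resolved different versions of one package.
        let mut fork_a = Resolution::default();
        fork_a
            .packages
            .entry("example-package".to_string())
            .or_default()
            .insert("1.0.0".to_string());

        let mut fork_b = Resolution::default();
        fork_b
            .packages
            .entry("example-package".to_string())
            .or_default()
            .insert("2.0.0".to_string());

        // Fold every forked resolution into a single combined resolution,
        // mirroring the loop at the end of the resolver.
        let mut combined = Resolution::default();
        for fork in [fork_a, fork_b] {
            combined.union(fork);
        }
        assert_eq!(combined.packages["example-package"].len(), 2);
    }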
This commit is contained in:
parent 9e977aa1be
commit f865406ab4
3 changed files with 324 additions and 227 deletions
@@ -29,4 +29,15 @@ impl FilePins
     ) -> Option<&ResolvedDist> {
         self.0.get(name)?.get(version)
     }
+
+    /// Add the pins in `other` to `self`.
+    ///
+    /// This assumes that if a version for a particular package exists in
+    /// both `self` and `other`, then they will both correspond to identical
+    /// distributions.
+    pub(crate) fn union(&mut self, other: FilePins) {
+        for (name, versions) in other.0 {
+            self.0.entry(name).or_default().extend(versions);
+        }
+    }
 }
@@ -1,23 +1,24 @@
 use std::hash::BuildHasherDefault;

-use pubgrub::solver::{Kind, State};
-use pubgrub::type_aliases::SelectedDependencies;
 use rustc_hash::{FxHashMap, FxHashSet};

 use distribution_types::{
     Dist, DistributionMetadata, Name, Requirement, ResolutionDiagnostic, VersionId, VersionOrUrlRef,
 };
-use pep440_rs::VersionSpecifier;
+use pep440_rs::{Version, VersionSpecifier};
 use pep508_rs::MarkerEnvironment;
+use petgraph::{
+    graph::{Graph, NodeIndex},
+    Directed,
+};
 use pypi_types::{ParsedUrlError, Yanked};
-use uv_normalize::PackageName;
+use uv_normalize::{ExtraName, PackageName};

-use crate::dependency_provider::UvDependencyProvider;
-use crate::pins::FilePins;
 use crate::preferences::Preferences;
 use crate::pubgrub::{PubGrubDistribution, PubGrubPackageInner};
 use crate::redirect::url_to_precise;
 use crate::resolution::AnnotatedDist;
+use crate::resolver::Resolution;
 use crate::{
     lock, InMemoryIndex, Lock, LockError, Manifest, MetadataResponse, ResolveError,
     VersionsResponse,
@@ -28,7 +29,7 @@ use crate::{
 #[derive(Debug)]
 pub struct ResolutionGraph {
     /// The underlying graph.
-    pub(crate) petgraph: petgraph::graph::Graph<AnnotatedDist, (), petgraph::Directed>,
+    pub(crate) petgraph: Graph<AnnotatedDist, Version, Directed>,
     /// Any diagnostics that were encountered while building the graph.
     pub(crate) diagnostics: Vec<ResolutionDiagnostic>,
 }
@@ -38,250 +39,206 @@ impl ResolutionGraph
     #[allow(clippy::too_many_arguments)]
     pub(crate) fn from_state(
         index: &InMemoryIndex,
-        selection: &SelectedDependencies<UvDependencyProvider>,
-        pins: &FilePins,
-        state: &State<UvDependencyProvider>,
         preferences: &Preferences,
+        resolution: Resolution,
     ) -> anyhow::Result<Self, ResolveError> {
         // Add every package to the graph.
-        let mut petgraph =
-            petgraph::graph::Graph::<AnnotatedDist, (), petgraph::Directed>::with_capacity(
-                selection.len(),
-                selection.len(),
+        let mut petgraph: Graph<AnnotatedDist, Version, Directed> =
+            Graph::with_capacity(resolution.packages.len(), resolution.packages.len());
+        let mut inverse: FxHashMap<(&PackageName, &Version, &Option<ExtraName>), NodeIndex<u32>> =
+            FxHashMap::with_capacity_and_hasher(
+                resolution.packages.len(),
+                BuildHasherDefault::default(),
             );
-        let mut inverse =
-            FxHashMap::with_capacity_and_hasher(selection.len(), BuildHasherDefault::default());
         let mut diagnostics = Vec::new();

-        for (package, version) in selection {
-            match &**package {
-                PubGrubPackageInner::Package {
-                    name,
-                    extra,
-                    marker: None,
-                    url: None,
-                } => {
-                    // Create the distribution.
-                    let dist = pins
-                        .get(name, version)
-                        .expect("Every package should be pinned")
-                        .clone();
+        for (package, versions) in &resolution.packages {
+            for version in versions {
+                match &**package {
+                    PubGrubPackageInner::Package {
+                        name,
+                        extra,
+                        marker: None,
+                        url: None,
+                    } => {
+                        // Create the distribution.
+                        let dist = resolution
+                            .pins
+                            .get(name, version)
+                            .expect("Every package should be pinned")
+                            .clone();

                         // Track yanks for any registry distributions.
                         match dist.yanked() {
                             None | Some(Yanked::Bool(false)) => {}
                             Some(Yanked::Bool(true)) => {
                                 diagnostics.push(ResolutionDiagnostic::YankedVersion {
                                     dist: dist.clone(),
                                     reason: None,
                                 });
+                            }
+                            Some(Yanked::Reason(reason)) => {
+                                diagnostics.push(ResolutionDiagnostic::YankedVersion {
+                                    dist: dist.clone(),
+                                    reason: Some(reason.clone()),
+                                });
+                            }
                         }
-                        Some(Yanked::Reason(reason)) => {
-                            diagnostics.push(ResolutionDiagnostic::YankedVersion {
-                                dist: dist.clone(),
-                                reason: Some(reason.clone()),
-                            });
-                        }
-                    }

                         // Extract the hashes, preserving those that were already present in the
                         // lockfile if necessary.
                         let hashes = if let Some(digests) = preferences
                             .match_hashes(name, version)
                             .filter(|digests| !digests.is_empty())
                         {
                             digests.to_vec()
                         } else if let Some(versions_response) = index.packages().get(name) {
                             if let VersionsResponse::Found(ref version_maps) = *versions_response {
                                 version_maps
                                     .iter()
                                     .find_map(|version_map| version_map.hashes(version))
                                     .map(|mut digests| {
                                         digests.sort_unstable();
                                         digests
                                     })
                                     .unwrap_or_default()
+                            } else {
+                                vec![]
+                            }
                         } else {
                             vec![]
-                        }
-                    } else {
-                        vec![]
-                    };
+                        };

                         // Extract the metadata.
                         let metadata = {
                             let dist = PubGrubDistribution::from_registry(name, version);

                             let response = index
                                 .distributions()
                                 .get(&dist.version_id())
                                 .unwrap_or_else(|| {
+                                    panic!(
+                                        "Every package should have metadata: {:?}",
+                                        dist.version_id()
+                                    )
+                                });

+                            let MetadataResponse::Found(archive) = &*response else {
                                 panic!(
                                     "Every package should have metadata: {:?}",
                                     dist.version_id()
                                 )
-                        });
+                            };

-                        let MetadataResponse::Found(archive) = &*response else {
-                            panic!(
-                                "Every package should have metadata: {:?}",
-                                dist.version_id()
-                            )
+                            archive.metadata.clone()
                         };

-                        archive.metadata.clone()
-                    };
-                    // Validate the extra.
-                    if let Some(extra) = extra {
-                        if !metadata.provides_extras.contains(extra) {
-                            diagnostics.push(ResolutionDiagnostic::MissingExtra {
-                                dist: dist.clone(),
-                                extra: extra.clone(),
-                            });
+                        // Validate the extra.
+                        if let Some(extra) = extra {
+                            if !metadata.provides_extras.contains(extra) {
+                                diagnostics.push(ResolutionDiagnostic::MissingExtra {
+                                    dist: dist.clone(),
+                                    extra: extra.clone(),
+                                });
+                            }
                         }

+                        // Add the distribution to the graph.
+                        let index = petgraph.add_node(AnnotatedDist {
+                            dist,
+                            extra: extra.clone(),
+                            hashes,
+                            metadata,
+                        });
+                        inverse.insert((name, version, extra), index);
                     }

-                    // Add the distribution to the graph.
-                    let index = petgraph.add_node(AnnotatedDist {
-                        dist,
-                        extra: extra.clone(),
-                        hashes,
-                        metadata,
-                    });
-                    inverse.insert((name, extra), index);
-                }
+                    PubGrubPackageInner::Package {
+                        name,
+                        extra,
+                        marker: None,
+                        url: Some(url),
+                    } => {
+                        // Create the distribution.
+                        let dist = Dist::from_url(name.clone(), url_to_precise(url.clone()))?;

-                PubGrubPackageInner::Package {
-                    name,
-                    extra,
-                    marker: None,
-                    url: Some(url),
-                } => {
-                    // Create the distribution.
-                    let dist = Dist::from_url(name.clone(), url_to_precise(url.clone()))?;
-                    // Extract the hashes, preserving those that were already present in the
-                    // lockfile if necessary.
-                    let hashes = if let Some(digests) = preferences
-                        .match_hashes(name, version)
-                        .filter(|digests| !digests.is_empty())
-                    {
-                        digests.to_vec()
-                    } else if let Some(metadata_response) =
-                        index.distributions().get(&dist.version_id())
-                    {
-                        if let MetadataResponse::Found(ref archive) = *metadata_response {
-                            let mut digests = archive.hashes.clone();
-                            digests.sort_unstable();
-                            digests
+                        // Extract the hashes, preserving those that were already present in the
+                        // lockfile if necessary.
+                        let hashes = if let Some(digests) = preferences
+                            .match_hashes(name, version)
+                            .filter(|digests| !digests.is_empty())
+                        {
+                            digests.to_vec()
+                        } else if let Some(metadata_response) =
+                            index.distributions().get(&dist.version_id())
+                        {
+                            if let MetadataResponse::Found(ref archive) = *metadata_response {
+                                let mut digests = archive.hashes.clone();
+                                digests.sort_unstable();
+                                digests
+                            } else {
+                                vec![]
+                            }
                         } else {
                             vec![]
-                        }
-                    } else {
-                        vec![]
-                    };
+                        };

                         // Extract the metadata.
                         let metadata = {
                             let dist = PubGrubDistribution::from_url(name, url);

                             let response = index
                                 .distributions()
                                 .get(&dist.version_id())
                                 .unwrap_or_else(|| {
+                                    panic!(
+                                        "Every package should have metadata: {:?}",
+                                        dist.version_id()
+                                    )
+                                });

+                            let MetadataResponse::Found(archive) = &*response else {
                                 panic!(
                                     "Every package should have metadata: {:?}",
                                     dist.version_id()
                                 )
-                        });
+                            };

-                        let MetadataResponse::Found(archive) = &*response else {
-                            panic!(
-                                "Every package should have metadata: {:?}",
-                                dist.version_id()
-                            )
+                            archive.metadata.clone()
                         };

-                        archive.metadata.clone()
-                    };
-                    // Validate the extra.
-                    if let Some(extra) = extra {
-                        if !metadata.provides_extras.contains(extra) {
-                            diagnostics.push(ResolutionDiagnostic::MissingExtra {
-                                dist: dist.clone().into(),
-                                extra: extra.clone(),
-                            });
+                        // Validate the extra.
+                        if let Some(extra) = extra {
+                            if !metadata.provides_extras.contains(extra) {
+                                diagnostics.push(ResolutionDiagnostic::MissingExtra {
+                                    dist: dist.clone().into(),
+                                    extra: extra.clone(),
+                                });
+                            }
                         }

+                        // Add the distribution to the graph.
+                        let index = petgraph.add_node(AnnotatedDist {
+                            dist: dist.into(),
+                            extra: extra.clone(),
+                            hashes,
+                            metadata,
+                        });
+                        inverse.insert((name, version, extra), index);
                     }

-                    // Add the distribution to the graph.
-                    let index = petgraph.add_node(AnnotatedDist {
-                        dist: dist.into(),
-                        extra: extra.clone(),
-                        hashes,
-                        metadata,
-                    });
-                    inverse.insert((name, extra), index);
-                }
-
-                _ => {}
-            };
+                    _ => {}
+                };
+            }
         }

         // Add every edge to the graph.
-        for (package, version) in selection {
-            for id in &state.incompatibilities[package] {
-                if let Kind::FromDependencyOf(self_package, self_version, dependency_package, ..) =
-                    &state.incompatibility_store[*id].kind
-                {
-                    // `Kind::FromDependencyOf` will include inverse dependencies. That is, if we're
-                    // looking for a package `A`, this list will include incompatibilities of
-                    // package `B` _depending on_ `A`. We're only interested in packages that `A`
-                    // depends on.
-                    if package != self_package {
-                        continue;
-                    }
-
-                    if !self_version.contains(version) {
-                        continue;
-                    }
-
-                    let PubGrubPackageInner::Package {
-                        name: self_name,
-                        extra: self_extra,
-                        ..
-                    } = &**self_package
-                    else {
-                        continue;
-                    };
-
-                    match &**dependency_package {
-                        PubGrubPackageInner::Package {
-                            name: dependency_name,
-                            extra: dependency_extra,
-                            ..
-                        } => {
-                            let self_index = &inverse[&(self_name, self_extra)];
-                            let dependency_index = &inverse[&(dependency_name, dependency_extra)];
-                            petgraph.update_edge(*self_index, *dependency_index, ());
-                        }
-
-                        PubGrubPackageInner::Extra {
-                            name: dependency_name,
-                            extra: dependency_extra,
-                            ..
-                        } => {
-                            let self_index = &inverse[&(self_name, self_extra)];
-                            let dependency_extra = Some(dependency_extra.clone());
-                            let dependency_index = &inverse[&(dependency_name, &dependency_extra)];
-                            petgraph.update_edge(*self_index, *dependency_index, ());
-                        }
-
-                        _ => {}
-                    }
-                }
-            }
+        for (names, version_set) in resolution.dependencies {
+            for versions in version_set {
+                let from_index =
+                    inverse[&(&names.from, &versions.from_version, &versions.from_extra)];
+                let to_index = inverse[&(&names.to, &versions.to_version, &versions.to_extra)];
+                petgraph.update_edge(from_index, to_index, versions.to_version.clone());
+            }
         }

@@ -462,8 +419,8 @@ impl ResolutionGraph
         for node_index in self.petgraph.node_indices() {
             let dist = &self.petgraph[node_index];
             let mut locked_dist = lock::Distribution::from_annotated_dist(dist)?;
-            for edge in self.petgraph.neighbors(node_index) {
-                let dependency_dist = &self.petgraph[edge];
+            for neighbor in self.petgraph.neighbors(node_index) {
+                let dependency_dist = &self.petgraph[neighbor];
                 locked_dist.add_dependency(dependency_dist);
             }
             locked_dists.push(locked_dist);
@@ -32,7 +32,7 @@ use pypi_types::Metadata23;
 pub(crate) use urls::Urls;
 use uv_configuration::{Constraints, Overrides};
 use uv_distribution::{ArchiveMetadata, DistributionDatabase};
-use uv_normalize::PackageName;
+use uv_normalize::{ExtraName, PackageName};
 use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider};

 use crate::candidate_selector::{CandidateDist, CandidateSelector};
@@ -315,7 +315,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
         'FORK: while let Some(mut state) = forked_states.pop() {
             loop {
                 // Run unit propagation.
-                state.pubgrub.unit_propagation(state.next)?;
+                state.pubgrub.unit_propagation(state.next.clone())?;

                 // Pre-visit all candidate packages, to allow metadata to be fetched in parallel. If
                 // the dependency mode is direct, we only need to visit the root package.
@@ -335,14 +335,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
                     if enabled!(Level::DEBUG) {
                         prefetcher.log_tried_versions();
                     }
-                    let selection = state.pubgrub.partial_solution.extract_solution();
-                    resolutions.push(ResolutionGraph::from_state(
-                        &self.index,
-                        &selection,
-                        &state.pins,
-                        &state.pubgrub,
-                        &self.preferences,
-                    )?);
+                    resolutions.push(state.into_resolution());
                     continue 'FORK;
                 };
                 state.next = highest_priority_pkg;
@@ -534,11 +527,11 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
                     .add_decision(state.next.clone(), version);
             }
         }
-        // This unwrap is okay because every code path above leads to at least
-        // one resolution being pushed.
-        //
-        // TODO: Implement merging of resolutions.
-        Ok(resolutions.pop().unwrap())
+        let mut combined = Resolution::default();
+        for resolution in resolutions {
+            combined.union(resolution);
+        }
+        ResolutionGraph::from_state(&self.index, &self.preferences, combined)
     }

     /// Visit a [`PubGrubPackage`] prior to selection. This should be called on a [`PubGrubPackage`]
@@ -1365,6 +1358,142 @@ struct SolveState
     added_dependencies: FxHashMap<PubGrubPackage, FxHashSet<Version>>,
 }

+impl SolveState {
+    fn into_resolution(self) -> Resolution {
+        let packages = self.pubgrub.partial_solution.extract_solution();
+        let mut dependencies: FxHashMap<
+            ResolutionDependencyNames,
+            FxHashSet<ResolutionDependencyVersions>,
+        > = FxHashMap::default();
+        for (package, self_version) in &packages {
+            for id in &self.pubgrub.incompatibilities[package] {
+                let pubgrub::solver::Kind::FromDependencyOf(
+                    ref self_package,
+                    ref self_range,
+                    ref dependency_package,
+                    ref dependency_range,
+                ) = self.pubgrub.incompatibility_store[*id].kind
+                else {
+                    continue;
+                };
+                if package != self_package {
+                    continue;
+                }
+                if !self_range.contains(self_version) {
+                    continue;
+                }
+                let Some(dependency_version) = packages.get(dependency_package) else {
+                    continue;
+                };
+                if !dependency_range.contains(dependency_version) {
+                    continue;
+                }
+
+                let PubGrubPackageInner::Package {
+                    name: ref self_name,
+                    extra: ref self_extra,
+                    ..
+                } = &**self_package
+                else {
+                    continue;
+                };
+
+                match **dependency_package {
+                    PubGrubPackageInner::Package {
+                        name: ref dependency_name,
+                        extra: ref dependency_extra,
+                        ..
+                    } => {
+                        if self_name == dependency_name {
+                            continue;
+                        }
+                        let names = ResolutionDependencyNames {
+                            from: self_name.clone(),
+                            to: dependency_name.clone(),
+                        };
+                        let versions = ResolutionDependencyVersions {
+                            from_version: self_version.clone(),
+                            from_extra: self_extra.clone(),
+                            to_version: dependency_version.clone(),
+                            to_extra: dependency_extra.clone(),
+                        };
+                        dependencies.entry(names).or_default().insert(versions);
+                    }
+
+                    PubGrubPackageInner::Extra {
+                        name: ref dependency_name,
+                        extra: ref dependency_extra,
+                        ..
+                    } => {
+                        if self_name == dependency_name {
+                            continue;
+                        }
+                        let names = ResolutionDependencyNames {
+                            from: self_name.clone(),
+                            to: dependency_name.clone(),
+                        };
+                        let versions = ResolutionDependencyVersions {
+                            from_version: self_version.clone(),
+                            from_extra: self_extra.clone(),
+                            to_version: dependency_version.clone(),
+                            to_extra: Some(dependency_extra.clone()),
+                        };
+                        dependencies.entry(names).or_default().insert(versions);
+                    }
+
+                    _ => {}
+                }
+            }
+        }
+        let packages = packages
+            .into_iter()
+            .map(|(package, version)| (package, FxHashSet::from_iter([version])))
+            .collect();
+        Resolution {
+            packages,
+            dependencies,
+            pins: self.pins,
+        }
+    }
+}
+
+#[derive(Debug, Default)]
+pub(crate) struct Resolution {
+    pub(crate) packages: FxHashMap<PubGrubPackage, FxHashSet<Version>>,
+    pub(crate) dependencies:
+        FxHashMap<ResolutionDependencyNames, FxHashSet<ResolutionDependencyVersions>>,
+    pub(crate) pins: FilePins,
+}
+
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub(crate) struct ResolutionDependencyNames {
+    pub(crate) from: PackageName,
+    pub(crate) to: PackageName,
+}
+
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub(crate) struct ResolutionDependencyVersions {
+    pub(crate) from_version: Version,
+    pub(crate) from_extra: Option<ExtraName>,
+    pub(crate) to_version: Version,
+    pub(crate) to_extra: Option<ExtraName>,
+}
+
+impl Resolution {
+    fn union(&mut self, other: Resolution) {
+        for (other_package, other_versions) in other.packages {
+            self.packages
+                .entry(other_package)
+                .or_default()
+                .extend(other_versions);
+        }
+        for (names, versions) in other.dependencies {
+            self.dependencies.entry(names).or_default().extend(versions);
+        }
+        self.pins.union(other.pins);
+    }
+}
+
 /// Fetch the metadata for an item
 #[derive(Debug)]
 #[allow(clippy::large_enum_variant)]