Use lockfile to prefill resolver index (#4495)

## Summary

Use the lockfile to prefill the `InMemoryIndex` used by the resolver.
This enables us to resolve entirely from the lockfile, without making
any network requests or performing any builds, when the requirements
are unchanged. It also means that if new requirements are added, we can
still avoid most I/O during resolution, partially addressing
https://github.com/astral-sh/uv/issues/3925.
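
As a rough illustration of the approach (a self-contained toy, not uv's actual `InMemoryIndex` API): seed a memoized index from the lockfile up front, so lookups for unchanged requirements hit the cache and only genuinely new packages fall through to the registry.

```rust
use std::collections::HashMap;

/// Toy stand-in for an in-memory resolver index: package name -> known versions.
struct Index {
    versions: HashMap<String, Vec<String>>,
}

impl Index {
    /// Prefill the index from (name, version) pairs read out of a lockfile.
    fn from_lockfile(entries: &[(&str, &str)]) -> Self {
        let mut versions: HashMap<String, Vec<String>> = HashMap::new();
        for (name, version) in entries {
            versions
                .entry((*name).to_string())
                .or_default()
                .push((*version).to_string());
        }
        Index { versions }
    }

    /// Look up a package, simulating a network fetch only on a cache miss.
    fn get(&mut self, name: &str) -> &[String] {
        self.versions.entry(name.to_string()).or_insert_with(|| {
            println!("cache miss: querying the registry for `{name}`");
            Vec::new() // in uv, this would be a real index request
        })
    }
}

fn main() {
    let mut index = Index::from_lockfile(&[("anyio", "4.3.0"), ("idna", "3.6")]);
    index.get("anyio"); // served from the prefilled index: no I/O
    index.get("sniffio"); // a newly added requirement still needs a fetch
}
```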

The main limitation of this PR is that resolution from the lockfile can
fail if new versions are requested that are not present in the lockfile,
in which case we have to perform a fresh resolution. Fixing this would
likely require lazy version/metadata requests from the `VersionMap`
(distinct from the lazy parsing we already do: the list of versions in
a `VersionMap` is currently immutable).
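
To make the constraint concrete, here is a simplified, self-contained analogue (not uv's actual `VersionMap`): the set of versions is fixed at construction, so a version missing from the lockfile can never be added lazily afterwards.

```rust
use std::collections::BTreeMap;

/// Toy analogue of a version map: built once from a `BTreeMap` and never
/// extended. Lazy version requests would require making this growable.
struct VersionMap {
    versions: BTreeMap<String, String>, // version -> distribution
}

impl From<BTreeMap<String, String>> for VersionMap {
    fn from(versions: BTreeMap<String, String>) -> Self {
        VersionMap { versions }
    }
}

impl VersionMap {
    /// Only versions present at construction time can ever be resolved.
    fn get(&self, version: &str) -> Option<&String> {
        self.versions.get(version)
    }
}

fn main() {
    let map = VersionMap::from(BTreeMap::from([(
        "4.3.0".to_string(),
        "anyio-4.3.0.tar.gz".to_string(),
    )]));
    assert!(map.get("4.3.0").is_some());
    assert!(map.get("4.4.0").is_none()); // absent from the lockfile: needs a fresh resolution
}
```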

Resolves https://github.com/astral-sh/uv/issues/3892.

## Test Plan

Added a `deterministic!` macro that asserts that a resolve from the
lockfile and a clean resolve produce the same lockfile output, across
all of our current tests.
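
A toy sketch of the shape of that assertion (the real macro's mechanics may differ): run the same resolution twice, once clean and once from the lockfile, and require identical output.

```rust
/// Toy `deterministic!`-style macro: `$resolve` is any closure taking a
/// `from_lockfile: bool` flag and returning the lockfile it produced.
macro_rules! deterministic {
    ($resolve:expr) => {{
        let clean = $resolve(false);
        let from_lockfile = $resolve(true);
        assert_eq!(clean, from_lockfile, "lockfile output diverged between resolves");
    }};
}

fn main() {
    deterministic!(|from_lockfile: bool| {
        // Stand-in resolver: a real test would invoke `uv lock` here.
        let _ = from_lockfile;
        String::from("[[distributions]]\nname = \"anyio\"\nversion = \"4.3.0\"\n")
    });
}
```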
Ibraheem Ahmed · 2024-07-12 18:49:28 -04:00 · committed by GitHub
commit ba217f1059 (parent df2ee8ad14)
13 changed files with 2578 additions and 1798 deletions

---

@@ -214,7 +214,7 @@ impl PrioritizedDist {
pub fn insert_built(
&mut self,
dist: RegistryBuiltWheel,
hashes: Vec<HashDigest>,
hashes: impl IntoIterator<Item = HashDigest>,
compatibility: WheelCompatibility,
) {
// Track the highest-priority wheel.
@@ -233,7 +233,7 @@ impl PrioritizedDist {
pub fn insert_source(
&mut self,
dist: RegistrySourceDist,
hashes: Vec<HashDigest>,
hashes: impl IntoIterator<Item = HashDigest>,
compatibility: SourceDistCompatibility,
) {
// Track the highest-priority source.
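
This signature change is a small API loosening: callers holding a single `HashDigest` can now pass `iter::once(hash)` instead of allocating a `Vec` (as `to_prioritized_dist` does later in this PR). A self-contained sketch of the same pattern, with illustrative names:

```rust
use std::iter;

/// Accepting `impl IntoIterator` instead of `Vec` lets callers supply a
/// single item without an intermediate allocation.
fn insert_hashes(hashes: impl IntoIterator<Item = String>, store: &mut Vec<String>) {
    store.extend(hashes);
}

fn main() {
    let mut store = Vec::new();
    insert_hashes(vec!["sha256:abc".to_string()], &mut store); // existing call sites still work
    insert_hashes(iter::once("sha256:def".to_string()), &mut store); // no Vec required
    assert_eq!(store.len(), 2);
}
```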

---

@@ -20,6 +20,13 @@ pub struct OnceMap<K, V, H = RandomState> {
}
impl<K: Eq + Hash, V: Clone, H: BuildHasher + Clone> OnceMap<K, V, H> {
/// Create a [`OnceMap`] with the specified capacity and hasher.
pub fn with_capacity_and_hasher(capacity: usize, hasher: H) -> OnceMap<K, V, H> {
OnceMap {
items: DashMap::with_capacity_and_hasher(capacity, hasher),
}
}
/// Register that you want to start a job.
///
/// If this method returns `true`, you need to start a job and call [`OnceMap::done`] eventually
@@ -101,6 +108,21 @@ impl<K: Eq + Hash + Clone, V, H: Default + BuildHasher + Clone> Default for Once
}
}
impl<K, V, H> FromIterator<(K, V)> for OnceMap<K, V, H>
where
K: Eq + Hash,
H: Default + Clone + BuildHasher,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
OnceMap {
items: iter
.into_iter()
.map(|(k, v)| (k, Value::Filled(v)))
.collect(),
}
}
}
enum Value<V> {
Waiting(Arc<Notify>),
Filled(V),
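
A simplified picture of why the `FromIterator` impl matters for prefilling (toy types, not uv's actual `OnceMap`): collected entries start out `Filled`, so readers never have to register a job or wait on a notification.

```rust
use std::collections::HashMap;
use std::hash::Hash;

/// Toy entry state mirroring the `Value` enum above.
enum Value<V> {
    #[allow(dead_code)]
    Waiting, // the real map stores an `Arc<Notify>` here
    Filled(V),
}

struct OnceMap<K, V> {
    items: HashMap<K, Value<V>>,
}

impl<K: Eq + Hash, V> FromIterator<(K, V)> for OnceMap<K, V> {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        OnceMap {
            items: iter.into_iter().map(|(k, v)| (k, Value::Filled(v))).collect(),
        }
    }
}

fn main() {
    // Prefill from lockfile-style (package, metadata) pairs.
    let map: OnceMap<&str, &str> = [("anyio", "metadata...")].into_iter().collect();
    assert!(matches!(map.items.get("anyio"), Some(Value::Filled(_))));
}
```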

---

@@ -107,6 +107,11 @@ impl ArchiveMetadata {
hashes: vec![],
}
}
/// Create an [`ArchiveMetadata`] with the given metadata and hashes.
pub fn with_hashes(metadata: Metadata, hashes: Vec<HashDigest>) -> Self {
Self { metadata, hashes }
}
}
impl From<Metadata> for ArchiveMetadata {

---

@@ -1,8 +1,12 @@
#![allow(clippy::default_trait_access)]
use std::borrow::Cow;
use std::collections::{BTreeMap, VecDeque};
use std::fmt::{Debug, Display};
use std::iter;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use either::Either;
use path_slash::PathExt;
@@ -15,9 +19,11 @@ use url::Url;
use cache_key::RepositoryUrl;
use distribution_filename::WheelFilename;
use distribution_types::{
BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, FileLocation,
GitSourceDist, IndexUrl, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
RegistrySourceDist, RemoteSource, Resolution, ResolvedDist, ToUrlError, UrlString,
BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist,
DistributionMetadata, FileLocation, GitSourceDist, HashComparison, IndexUrl, PathBuiltDist,
PathSourceDist, PrioritizedDist, RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist,
RemoteSource, Resolution, ResolvedDist, SourceDistCompatibility, ToUrlError, UrlString,
VersionId, WheelCompatibility,
};
use pep440_rs::{Version, VersionSpecifiers};
use pep508_rs::{
@@ -27,13 +33,16 @@ use platform_tags::{TagCompatibility, TagPriority, Tags};
use pypi_types::{
HashDigest, ParsedArchiveUrl, ParsedGitUrl, ParsedUrl, Requirement, RequirementSource,
};
use uv_configuration::ExtrasSpecification;
use uv_distribution::{Metadata, VirtualProject};
use uv_configuration::{ExtrasSpecification, Upgrade};
use uv_distribution::{ArchiveMetadata, Metadata, VirtualProject};
use uv_git::{GitReference, GitSha, RepositoryReference, ResolvedRepositoryReference};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
use crate::{RequiresPython, ResolutionGraph};
use crate::resolver::FxOnceMap;
use crate::{
InMemoryIndex, MetadataResponse, RequiresPython, ResolutionGraph, VersionMap, VersionsResponse,
};
/// The current version of the lock file format.
const VERSION: u32 = 1;
@@ -459,6 +468,64 @@ impl Lock {
.expect("valid index for distribution");
dist
}
/// Convert the [`Lock`] to an [`InMemoryIndex`] that can be used for resolution.
///
/// Any packages specified to be upgraded will be ignored.
pub fn to_index(
&self,
install_path: &Path,
upgrade: &Upgrade,
) -> Result<InMemoryIndex, LockError> {
let distributions =
FxOnceMap::with_capacity_and_hasher(self.distributions.len(), Default::default());
let mut packages: FxHashMap<_, BTreeMap<Version, PrioritizedDist>> =
FxHashMap::with_capacity_and_hasher(self.distributions.len(), Default::default());
for distribution in &self.distributions {
// Skip packages that may be upgraded from their pinned version.
if upgrade.contains(distribution.name()) {
continue;
}
match distribution.id.source {
Source::Registry(..) | Source::Git(..) => {}
// Skip local and direct URL dependencies, as their metadata may have been mutated
// without a version change.
Source::Path(..)
| Source::Directory(..)
| Source::Editable(..)
| Source::Direct(..) => continue,
}
// Add registry distributions to the package index.
if let Some(prioritized_dist) = distribution.to_prioritized_dist(install_path)? {
packages
.entry(distribution.name().clone())
.or_default()
.insert(distribution.id.version.clone(), prioritized_dist);
}
// Extract the distribution metadata.
let version_id = distribution.version_id(install_path)?;
let hashes = distribution.hashes();
let metadata = distribution.to_metadata(install_path)?;
// Add metadata to the distributions index.
let response = MetadataResponse::Found(ArchiveMetadata::with_hashes(metadata, hashes));
distributions.done(version_id, Arc::new(response));
}
let packages = packages
.into_iter()
.map(|(name, versions)| {
let response = VersionsResponse::Found(vec![VersionMap::from(versions)]);
(name, Arc::new(response))
})
.collect();
Ok(InMemoryIndex::with(packages, distributions))
}
}
#[derive(Clone, Debug, serde::Deserialize)]
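
The heart of `to_index` above is a filtering pass over the locked distributions. A condensed, self-contained sketch of that pass (simplified types; names are illustrative):

```rust
use std::collections::{BTreeMap, HashMap, HashSet};

#[allow(dead_code)]
enum Source {
    Registry,
    Git,
    Path, // stands in for Path / Directory / Editable / Direct
}

struct LockedDist {
    name: String,
    version: String,
    source: Source,
}

/// Skip packages marked for upgrade and local sources whose metadata may
/// have changed without a version bump, then group the rest by name.
fn prefill<'a>(
    lock: &'a [LockedDist],
    upgrade: &HashSet<String>,
) -> HashMap<String, BTreeMap<String, &'a LockedDist>> {
    let mut packages: HashMap<String, BTreeMap<String, &LockedDist>> = HashMap::new();
    for dist in lock {
        if upgrade.contains(&dist.name) {
            continue; // may move past its pinned version
        }
        match dist.source {
            Source::Registry | Source::Git => {}
            Source::Path => continue,
        }
        packages
            .entry(dist.name.clone())
            .or_default()
            .insert(dist.version.clone(), dist);
    }
    packages
}

fn main() {
    let lock = vec![
        LockedDist { name: "anyio".into(), version: "4.3.0".into(), source: Source::Registry },
        LockedDist { name: "local-pkg".into(), version: "0.1.0".into(), source: Source::Path },
    ];
    let packages = prefill(&lock, &HashSet::new());
    assert!(packages.contains_key("anyio"));
    assert!(!packages.contains_key("local-pkg")); // local sources are never prefilled
}
```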
@@ -630,7 +697,25 @@ impl Distribution {
};
}
match &self.id.source {
if let Some(sdist) = self.to_source_dist(workspace_root)? {
return Ok(Dist::Source(sdist));
}
Err(LockErrorKind::NeitherSourceDistNorWheel {
id: self.id.clone(),
}
.into())
}
/// Convert the source of this [`Distribution`] to a [`SourceDist`] that can be used in installation.
///
/// Returns `Ok(None)` if the source cannot be converted because `self.sdist` is `None`. This is required
/// for registry sources.
fn to_source_dist(
&self,
workspace_root: &Path,
) -> Result<Option<distribution_types::SourceDist>, LockError> {
let sdist = match &self.id.source {
Source::Path(path) => {
let path_dist = PathSourceDist {
name: self.id.name.clone(),
@@ -638,8 +723,7 @@ impl Distribution {
install_path: workspace_root.join(path),
lock_path: path.clone(),
};
let source_dist = distribution_types::SourceDist::Path(path_dist);
return Ok(Dist::Source(source_dist));
distribution_types::SourceDist::Path(path_dist)
}
Source::Directory(path) => {
let dir_dist = DirectorySourceDist {
@@ -649,8 +733,7 @@ impl Distribution {
lock_path: path.clone(),
editable: false,
};
let source_dist = distribution_types::SourceDist::Directory(dir_dist);
return Ok(Dist::Source(source_dist));
distribution_types::SourceDist::Directory(dir_dist)
}
Source::Editable(path) => {
let dir_dist = DirectorySourceDist {
@@ -660,8 +743,7 @@ impl Distribution {
lock_path: path.clone(),
editable: true,
};
let source_dist = distribution_types::SourceDist::Directory(dir_dist);
return Ok(Dist::Source(source_dist));
distribution_types::SourceDist::Directory(dir_dist)
}
Source::Git(url, git) => {
// Reconstruct the `GitUrl` from the `GitSource`.
@@ -681,8 +763,7 @@ impl Distribution {
git: Box::new(git_url),
subdirectory: git.subdirectory.as_ref().map(PathBuf::from),
};
let source_dist = distribution_types::SourceDist::Git(git_dist);
return Ok(Dist::Source(source_dist));
distribution_types::SourceDist::Git(git_dist)
}
Source::Direct(url, direct) => {
let url = Url::from(ParsedArchiveUrl {
@@ -695,52 +776,86 @@ impl Distribution {
subdirectory: direct.subdirectory.as_ref().map(PathBuf::from),
url: VerbatimUrl::from_url(url),
};
let source_dist = distribution_types::SourceDist::DirectUrl(direct_dist);
return Ok(Dist::Source(source_dist));
distribution_types::SourceDist::DirectUrl(direct_dist)
}
Source::Registry(url) => {
if let Some(ref sdist) = self.sdist {
let file_url = sdist.url().ok_or_else(|| LockErrorKind::MissingUrl {
let Some(ref sdist) = self.sdist else {
return Ok(None);
};
let file_url = sdist.url().ok_or_else(|| LockErrorKind::MissingUrl {
id: self.id.clone(),
})?;
let filename = sdist
.filename()
.ok_or_else(|| LockErrorKind::MissingFilename {
id: self.id.clone(),
})?;
let filename =
sdist
.filename()
.ok_or_else(|| LockErrorKind::MissingFilename {
id: self.id.clone(),
})?;
let file = Box::new(distribution_types::File {
dist_info_metadata: false,
filename: filename.to_string(),
hashes: vec![],
requires_python: None,
size: sdist.size(),
upload_time_utc_ms: None,
url: FileLocation::AbsoluteUrl(file_url.clone().into()),
yanked: None,
});
let index = IndexUrl::Url(VerbatimUrl::from_url(url.clone()));
let reg_dist = RegistrySourceDist {
name: self.id.name.clone(),
version: self.id.version.clone(),
file,
index,
wheels: vec![],
};
let source_dist = distribution_types::SourceDist::Registry(reg_dist);
return Ok(Dist::Source(source_dist));
}
}
}
let file = Box::new(distribution_types::File {
dist_info_metadata: false,
filename: filename.to_string(),
hashes: vec![sdist.hash().0.clone()],
requires_python: None,
size: sdist.size(),
upload_time_utc_ms: None,
url: FileLocation::AbsoluteUrl(file_url.clone().into()),
yanked: None,
});
let index = IndexUrl::Url(VerbatimUrl::from_url(url.clone()));
Err(LockErrorKind::NeitherSourceDistNorWheel {
id: self.id.clone(),
}
.into())
let reg_dist = RegistrySourceDist {
name: self.id.name.clone(),
version: self.id.version.clone(),
file,
index,
wheels: vec![],
};
distribution_types::SourceDist::Registry(reg_dist)
}
};
Ok(Some(sdist))
}
/// Convert the [`Distribution`] to a [`PrioritizedDist`] that can be used for resolution, if
/// it has a registry source.
fn to_prioritized_dist(
&self,
workspace_root: &Path,
) -> Result<Option<PrioritizedDist>, LockError> {
let prioritized_dist = match &self.id.source {
Source::Registry(url) => {
let mut prioritized_dist = PrioritizedDist::default();
// Add the source distribution.
if let Some(distribution_types::SourceDist::Registry(sdist)) =
self.to_source_dist(workspace_root)?
{
// When resolving from a lockfile all sources are equally compatible.
let compat = SourceDistCompatibility::Compatible(HashComparison::Matched);
let hash = self.sdist.as_ref().unwrap().hash().0.clone();
prioritized_dist.insert_source(sdist, iter::once(hash), compat);
};
// Add any wheels.
for wheel in &self.wheels {
let hash = wheel.hash.as_ref().map(|h| h.0.clone());
let wheel = wheel.to_registry_dist(url);
let compat =
WheelCompatibility::Compatible(HashComparison::Matched, None, None);
prioritized_dist.insert_built(wheel, hash, compat);
}
prioritized_dist
}
_ => return Ok(None),
};
Ok(Some(prioritized_dist))
}
/// Convert the [`Distribution`] to [`Metadata`] that can be used for resolution.
pub fn into_metadata(self, workspace_root: &Path) -> Result<Metadata, LockError> {
pub fn to_metadata(&self, workspace_root: &Path) -> Result<Metadata, LockError> {
let name = self.name().clone();
let version = self.id.version.clone();
let provides_extras = self.optional_dependencies.keys().cloned().collect();
@@ -748,19 +863,17 @@ impl Distribution {
let mut dependency_extras = FxHashMap::default();
let mut requires_dist = self
.dependencies
.into_iter()
.iter()
.filter_map(|dep| {
dep.into_requirement(workspace_root, &mut dependency_extras)
dep.to_requirement(workspace_root, &mut dependency_extras)
.transpose()
})
.collect::<Result<Vec<_>, LockError>>()?;
// Denormalize optional dependencies.
for (extra, deps) in self.optional_dependencies {
for (extra, deps) in &self.optional_dependencies {
for dep in deps {
if let Some(mut dep) =
dep.into_requirement(workspace_root, &mut dependency_extras)?
{
if let Some(mut dep) = dep.to_requirement(workspace_root, &mut dependency_extras)? {
// Add back the extra marker expression.
let marker = MarkerTree::Expression(MarkerExpression::Extra {
operator: ExtraOperator::Equal,
@@ -785,13 +898,13 @@ impl Distribution {
let dev_dependencies = self
.dev_dependencies
.into_iter()
.iter()
.map(|(group, deps)| {
let mut dependency_extras = FxHashMap::default();
let mut deps = deps
.into_iter()
.iter()
.filter_map(|dep| {
dep.into_requirement(workspace_root, &mut dependency_extras)
dep.to_requirement(workspace_root, &mut dependency_extras)
.transpose()
})
.collect::<Result<Vec<_>, LockError>>()?;
@@ -803,7 +916,7 @@ impl Distribution {
}
}
Ok((group, deps))
Ok((group.clone(), deps))
})
.collect::<Result<_, LockError>>()?;
@@ -898,6 +1011,29 @@ impl Distribution {
&self.id.name
}
/// Returns a [`VersionId`] for this package that can be used for resolution.
fn version_id(&self, workspace_root: &Path) -> Result<VersionId, LockError> {
match &self.id.source {
Source::Registry(_) => Ok(VersionId::NameVersion(
self.name().clone(),
self.id.version.clone(),
)),
_ => Ok(self.to_source_dist(workspace_root)?.unwrap().version_id()),
}
}
/// Returns all the hashes associated with this [`Distribution`].
fn hashes(&self) -> Vec<HashDigest> {
let mut hashes = Vec::new();
if let Some(ref sdist) = self.sdist {
hashes.push(sdist.hash().0.clone());
}
for wheel in &self.wheels {
hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
}
hashes
}
/// Returns the [`ResolvedRepositoryReference`] for the distribution, if it is a Git source.
pub fn as_git_ref(&self) -> Option<ResolvedRepositoryReference> {
match &self.id.source {
@@ -1809,7 +1945,7 @@ impl Wheel {
let file = Box::new(distribution_types::File {
dist_info_metadata: false,
filename: filename.to_string(),
hashes: vec![],
hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
requires_python: None,
size: self.size,
upload_time_utc_ms: None,
@@ -1904,23 +2040,23 @@ impl Dependency {
}
/// Convert the [`Dependency`] to a [`Requirement`] that can be used for resolution.
pub(crate) fn into_requirement(
self,
pub(crate) fn to_requirement(
&self,
workspace_root: &Path,
extras: &mut FxHashMap<PackageName, Vec<ExtraName>>,
) -> Result<Option<Requirement>, LockError> {
// Keep track of extras, these will be denormalized later.
if let Some(extra) = self.extra {
if let Some(ref extra) = self.extra {
extras
.entry(self.distribution_id.name)
.entry(self.distribution_id.name.clone())
.or_default()
.push(extra);
.push(extra.clone());
return Ok(None);
}
// Reconstruct the `RequirementSource` from the `Source`.
let source = match self.distribution_id.source {
let source = match &self.distribution_id.source {
Source::Registry(_) => RequirementSource::Registry {
specifier: VersionSpecifiers::empty(),
index: None,
@@ -1964,7 +2100,7 @@ impl Dependency {
let requirement = Requirement {
name: self.distribution_id.name.clone(),
marker: self.marker,
marker: self.marker.clone(),
origin: None,
extras: Vec::new(),
source,

---

@@ -25,6 +25,17 @@ struct SharedInMemoryIndex {
pub(crate) type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;
impl InMemoryIndex {
/// Create an `InMemoryIndex` with pre-filled packages and distributions.
pub fn with(
packages: FxOnceMap<PackageName, Arc<VersionsResponse>>,
distributions: FxOnceMap<VersionId, Arc<MetadataResponse>>,
) -> InMemoryIndex {
InMemoryIndex(Arc::new(SharedInMemoryIndex {
packages,
distributions,
}))
}
/// Returns a reference to the package metadata map.
pub fn packages(&self) -> &FxOnceMap<PackageName, Arc<VersionsResponse>> {
&self.0.packages
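
For completeness, a shape-level sketch of what this constructor adopts (toy types; in uv the fields are `FxOnceMap`s keyed by `PackageName` and `VersionId`):

```rust
use std::collections::HashMap;
use std::sync::Arc;

/// Toy index with the same two-map shape as `SharedInMemoryIndex`:
/// package -> version list, and version id -> metadata.
struct InMemoryIndex {
    packages: HashMap<String, Arc<Vec<String>>>,
    distributions: HashMap<String, Arc<String>>,
}

impl InMemoryIndex {
    /// Analogue of `InMemoryIndex::with`: adopt prefilled maps wholesale.
    fn with(
        packages: HashMap<String, Arc<Vec<String>>>,
        distributions: HashMap<String, Arc<String>>,
    ) -> InMemoryIndex {
        InMemoryIndex { packages, distributions }
    }
}

fn main() {
    let packages = HashMap::from([("anyio".to_string(), Arc::new(vec!["4.3.0".to_string()]))]);
    let distributions =
        HashMap::from([("anyio-4.3.0".to_string(), Arc::new("metadata".to_string()))]);
    let index = InMemoryIndex::with(packages, distributions);
    assert!(index.packages.contains_key("anyio"));
    assert!(index.distributions.contains_key("anyio-4.3.0"));
}
```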

---

@@ -345,6 +345,7 @@ pub(crate) async fn pip_compile(
options,
printer,
preview,
false,
)
.await
{

---

@@ -336,6 +336,7 @@ pub(crate) async fn pip_install(
options,
printer,
preview,
false,
)
.await
{

---

@@ -1,7 +1,8 @@
//! Common operations shared across the `pip` API and subcommands.
use std::fmt::Write;
use std::fmt::{self, Write};
use std::path::PathBuf;
use std::time::Instant;
use anyhow::{anyhow, Context};
use itertools::Itertools;
@@ -97,8 +98,9 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
options: Options,
printer: Printer,
preview: PreviewMode,
quiet: bool,
) -> Result<ResolutionGraph, Error> {
let start = std::time::Instant::now();
let start = Instant::now();
// Resolve the requirements from the provided sources.
let requirements = {
@@ -250,7 +252,21 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
resolver.resolve().await?
};
if !quiet {
resolution_success(&resolution, start, printer)?;
}
Ok(resolution)
}
/// Prints a success message after completing resolution.
pub(crate) fn resolution_success(
resolution: &ResolutionGraph,
start: Instant,
printer: Printer,
) -> fmt::Result {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
@@ -260,9 +276,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
format!("in {}", elapsed(start.elapsed())).dimmed()
)
.dimmed()
)?;
Ok(resolution)
)
}
#[derive(Debug, Clone, Copy)]

---

@@ -291,6 +291,7 @@ pub(crate) async fn pip_sync(
options,
printer,
preview,
false,
)
.await
{

---

@@ -1,6 +1,9 @@
#![allow(clippy::single_match_else)]
use anstream::eprint;
use distribution_types::UnresolvedRequirementSpecification;
use distribution_types::{Diagnostic, UnresolvedRequirementSpecification};
use tracing::debug;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy};
@@ -93,7 +96,7 @@ pub(crate) async fn lock(
pub(super) async fn do_lock(
workspace: &Workspace,
interpreter: &Interpreter,
existing: Option<&Lock>,
existing_lock: Option<&Lock>,
settings: ResolverSettingsRef<'_>,
state: &SharedState,
preview: PreviewMode,
@@ -122,12 +125,12 @@ pub(super) async fn do_lock(
.members_as_requirements()
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect();
.collect::<Vec<_>>();
let overrides = workspace
.overrides()
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect();
.collect::<Vec<_>>();
let constraints = vec![];
let dev = vec![DEV_DEPENDENCIES.clone()];
let source_trees = vec![];
@@ -185,7 +188,8 @@ pub(super) async fn do_lock(
};
// If an existing lockfile exists, build up a set of preferences.
let LockedRequirements { preferences, git } = existing
let LockedRequirements { preferences, git } = existing_lock
.as_ref()
.map(|lock| read_lock_requirements(lock, upgrade))
.unwrap_or_default();
@@ -194,54 +198,171 @@ pub(super) async fn do_lock(
state.git.insert(reference, sha);
}
// Create a build dispatch.
let build_dispatch = BuildDispatch::new(
&client,
cache,
interpreter,
index_locations,
&flat_index,
&state.index,
&state.git,
&state.in_flight,
index_strategy,
setup_py,
config_setting,
build_isolation,
link_mode,
build_options,
exclude_newer,
concurrency,
preview,
);
let start = std::time::Instant::now();
// Resolve the requirements.
let resolution = pip::operations::resolve(
requirements,
constraints,
overrides,
dev,
source_trees,
None,
&extras,
preferences,
EmptyInstalledPackages,
&hasher,
&Reinstall::default(),
upgrade,
None,
None,
python_requirement,
&client,
&flat_index,
&state.index,
&build_dispatch,
concurrency,
options,
printer,
preview,
)
.await?;
let requires_python = find_requires_python(workspace)?;
let existing_lock = existing_lock.filter(|lock| {
match (lock.requires_python(), requires_python.as_ref()) {
// If the Requires-Python bound in the lockfile is weaker or equivalent to the
// Requires-Python bound in the workspace, we should have the necessary wheels to perform
// a locked resolution.
(None, Some(_)) => true,
(Some(locked), Some(specified)) if locked.bound() == specified.bound() => true,
// On the other hand, if the bound in the lockfile is stricter, meaning the
// bound has since been weakened, we have to perform a clean resolution to ensure
// we fetch the necessary wheels.
_ => false,
}
});
let resolution = match existing_lock {
None => None,
// If we are ignoring pinned versions in the lockfile, we need to do a full resolution.
Some(_) if upgrade.is_all() => None,
// Otherwise, we can try to resolve using metadata in the lockfile.
//
// When resolving from the lockfile we can still download and install new distributions,
// but we rely on the lockfile for the metadata of any existing distributions. If we have
// any outdated metadata we fall back to a clean resolve.
Some(lock) => {
debug!("Resolving with existing `uv.lock`");
// Prefill the index with the lockfile metadata.
let index = lock.to_index(workspace.install_path(), upgrade)?;
// Create a build dispatch.
let build_dispatch = BuildDispatch::new(
&client,
cache,
interpreter,
index_locations,
&flat_index,
&index,
&state.git,
&state.in_flight,
index_strategy,
setup_py,
config_setting,
build_isolation,
link_mode,
build_options,
exclude_newer,
concurrency,
preview,
);
// Resolve the requirements.
pip::operations::resolve(
requirements.clone(),
constraints.clone(),
overrides.clone(),
dev.clone(),
source_trees.clone(),
None,
&extras,
preferences.clone(),
EmptyInstalledPackages,
&hasher,
&Reinstall::default(),
upgrade,
None,
None,
python_requirement.clone(),
&client,
&flat_index,
&index,
&build_dispatch,
concurrency,
options,
printer,
preview,
true,
)
.await
.inspect_err(|err| debug!("Resolution with `uv.lock` failed: {err}"))
.ok()
.filter(|resolution| {
// Ensure no diagnostics were emitted that may be caused by stale metadata in the lockfile.
if resolution.diagnostics().is_empty() {
return true;
}
debug!("Resolution with `uv.lock` failed due to diagnostics:");
for diagnostic in resolution.diagnostics() {
debug!("{}", diagnostic.message());
}
false
})
}
};
let resolution = match resolution {
// Resolution from the lockfile succeeded.
Some(resolution) => resolution,
// The lockfile did not contain enough information to obtain a resolution; fall back
// to a fresh resolve.
None => {
debug!("Starting clean resolution.");
// Create a build dispatch.
let build_dispatch = BuildDispatch::new(
&client,
cache,
interpreter,
index_locations,
&flat_index,
&state.index,
&state.git,
&state.in_flight,
index_strategy,
setup_py,
config_setting,
build_isolation,
link_mode,
build_options,
exclude_newer,
concurrency,
preview,
);
// Resolve the requirements.
pip::operations::resolve(
requirements,
constraints,
overrides,
dev,
source_trees,
None,
&extras,
preferences,
EmptyInstalledPackages,
&hasher,
&Reinstall::default(),
upgrade,
None,
None,
python_requirement,
&client,
&flat_index,
&state.index,
&build_dispatch,
concurrency,
options,
printer,
preview,
true,
)
.await?
}
};
// Print the success message after completing resolution.
pip::operations::resolution_success(&resolution, start, printer)?;
// Notify the user of any resolution diagnostics.
pip::operations::diagnose_resolution(resolution.diagnostics(), printer)?;
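
Stepping back, the control flow added to `do_lock` is a try-then-fall-back pattern: attempt resolution against the lockfile-seeded index, and fall back to a clean resolve if that attempt errors or emits diagnostics. A condensed, self-contained sketch of the flow (stand-in types):

```rust
struct Resolution {
    diagnostics: Vec<String>,
}

/// Stand-in for `pip::operations::resolve`; `prefilled` selects whether
/// the lockfile-seeded index is used.
fn resolve(prefilled: bool) -> Result<Resolution, String> {
    if prefilled {
        // Stale lockfile metadata can surface as an error or a diagnostic.
        Ok(Resolution { diagnostics: vec!["example: stale metadata".into()] })
    } else {
        Ok(Resolution { diagnostics: vec![] })
    }
}

fn main() -> Result<(), String> {
    // Try the cheap, lockfile-backed resolution first...
    let resolution = match resolve(true).ok().filter(|r| r.diagnostics.is_empty()) {
        Some(resolution) => resolution,
        // ...and fall back to a clean resolve if it failed or looked suspect.
        None => resolve(false)?,
    };
    assert!(resolution.diagnostics.is_empty());
    Ok(())
}
```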

---

@@ -495,6 +495,7 @@ pub(crate) async fn resolve_environment<'a>(
options,
printer,
preview,
false,
)
.await?)
}
@@ -746,6 +747,7 @@ pub(crate) async fn update_environment(
options,
printer,
preview,
false,
)
.await
{

---

@@ -86,7 +86,7 @@ pub(crate) async fn tree(
let mut packages: IndexMap<_, Vec<_>> = IndexMap::new();
for dist in lock.into_distributions() {
let name = dist.name().clone();
let metadata = dist.into_metadata(workspace.install_path())?;
let metadata = dist.to_metadata(workspace.install_path())?;
packages.entry(name).or_default().push(metadata);
}

(Diff for one file suppressed because it is too large.)