# Use Resolver in pip sync (#2696)
## Summary

This PR removes the custom `DistFinder` that we use in `pip sync`. It originally existed because `VersionMap` wasn't lazy, so `DistFinder` saved a lot of time by reading distribution data lazily. But now, AFAICT, there's really no benefit: maintaining `DistFinder` means we effectively have to maintain two resolvers, and we end up fixing bugs in `DistFinder` that don't exist in the `Resolver` (like #2688).

Closes #2694. Closes #2443.

## Test Plan

I ran this benchmark a bunch. It's basically a wash; sometimes one is faster than the other.

```
❯ python -m scripts.bench \
    --uv-path ./target/release/main \
    --uv-path ./target/release/uv \
    scripts/requirements/compiled/trio.txt --min-runs 50 --benchmark install-warm --warmup 25
Benchmark 1: ./target/release/main (install-warm)
  Time (mean ± σ):      54.0 ms ±  10.6 ms    [User: 8.7 ms, System: 98.1 ms]
  Range (min … max):    45.5 ms …  94.3 ms    50 runs

  Warning: Statistical outliers were detected. Consider re-running this benchmark on a quiet PC without any interferences from other programs. It might help to use the '--warmup' or '--prepare' options.

Benchmark 2: ./target/release/uv (install-warm)
  Time (mean ± σ):      50.7 ms ±   9.2 ms    [User: 8.7 ms, System: 98.6 ms]
  Range (min … max):    44.0 ms …  98.6 ms    50 runs

  Warning: The first benchmarking run for this command was significantly slower than the rest (77.6 ms). This could be caused by (filesystem) caches that were not filled until after the first run. You should consider using the '--warmup' option to fill those caches before the actual benchmark. Alternatively, use the '--prepare' option to clear the caches before each timing run.

Summary
  './target/release/uv (install-warm)' ran
    1.06 ± 0.29 times faster than './target/release/main (install-warm)'
```
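The heart of the change is the new resolution path in `pip sync`: instead of `DistFinder`, the command now runs the standard `Resolver` restricted to direct dependencies. Below is a condensed sketch assembled from the diff in this commit (the surrounding setup, such as `remote`, `client`, `flat_index`, `index`, `build_dispatch`, `printer`, and `venv`, is assumed to exist as in the existing command):

```rust
// Sketch of the new `pip sync` code path (drawn from the diff below), not a drop-in snippet.
let interpreter = venv.interpreter();
let tags = interpreter.tags()?;
let markers = interpreter.markers();

// Resolve with `--no-deps` semantics: only the requirements named in the file.
let options = OptionsBuilder::new()
    .dependency_mode(DependencyMode::Direct)
    .build();

// Bound the progress bar, since the number of packages is known upfront.
let reporter = ResolverReporter::from(printer).with_length(remote.len() as u64);

// Run the general-purpose resolver in place of the old `DistFinder`.
let resolver = Resolver::new(
    Manifest::simple(remote),
    options,
    markers,
    interpreter,
    tags,
    &client,
    &flat_index,
    &index,
    &build_dispatch,
)?
.with_reporter(reporter);
let resolution = resolver.resolve().await?;
```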
Parent: d41ab0ef4d
Commit: f8fa887c0b

9 changed files with 64 additions and 570 deletions
@@ -90,9 +90,9 @@ pub enum Error {
         "The .dist-info directory {0} does not consist of the normalized package name and version"
     )]
     MissingDistInfoSegments(String),
-    #[error("The .dist-info directory {0} does not start with the normalized package name: {0}")]
+    #[error("The .dist-info directory {0} does not start with the normalized package name: {1}")]
     MissingDistInfoPackageName(String, String),
-    #[error("The .dist-info directory {0} does not start with the normalized version: {0}")]
+    #[error("The .dist-info directory {0} does not start with the normalized version: {1}")]
    MissingDistInfoVersion(String, String),
    #[error("The .dist-info directory name contains invalid characters")]
    InvalidDistInfoPrefix,
@@ -1,195 +0,0 @@
-use std::path::PathBuf;
-use std::str::FromStr;
-
-use anstream::eprintln;
-use anyhow::{Context, Result};
-use clap::Parser;
-use futures::StreamExt;
-use itertools::{Either, Itertools};
-use rustc_hash::FxHashMap;
-use tracing::info;
-
-use distribution_types::{
-    CachedDist, Dist, DistributionMetadata, IndexLocations, Name, Resolution, VersionOrUrl,
-};
-use install_wheel_rs::linker::LinkMode;
-use pep508_rs::Requirement;
-use platform_tags::Tags;
-use uv_cache::{Cache, CacheArgs};
-use uv_client::{FlatIndex, RegistryClient, RegistryClientBuilder};
-use uv_dispatch::BuildDispatch;
-use uv_distribution::RegistryWheelIndex;
-use uv_installer::{Downloader, NoBinary};
-use uv_interpreter::PythonEnvironment;
-use uv_normalize::PackageName;
-use uv_resolver::{DistFinder, InMemoryIndex};
-use uv_types::{BuildContext, BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
-
-#[derive(Parser)]
-pub(crate) struct InstallManyArgs {
-    /// Path to a file containing one requirement per line.
-    requirements: PathBuf,
-    #[clap(long)]
-    limit: Option<usize>,
-    /// Don't build source distributions. This means resolving will not run arbitrary code. The
-    /// cached wheels of already built source distributions will be reused.
-    #[clap(long)]
-    no_build: bool,
-    /// Run this many tasks in parallel
-    #[clap(long, default_value = "50")]
-    num_tasks: usize,
-    #[command(flatten)]
-    cache_args: CacheArgs,
-}
-
-pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
-    let data = fs_err::read_to_string(&args.requirements)?;
-
-    let lines = data.lines().map(Requirement::from_str);
-    let requirements: Vec<Requirement> = if let Some(limit) = args.limit {
-        lines.take(limit).collect::<Result<_, _>>()?
-    } else {
-        lines.collect::<Result<_, _>>()?
-    };
-    info!("Got {} requirements", requirements.len());
-
-    let cache = Cache::try_from(args.cache_args)?;
-    let venv = PythonEnvironment::from_virtualenv(&cache)?;
-    let client = RegistryClientBuilder::new(cache.clone()).build();
-    let index_locations = IndexLocations::default();
-    let flat_index = FlatIndex::default();
-    let index = InMemoryIndex::default();
-    let setup_py = SetupPyStrategy::default();
-    let in_flight = InFlight::default();
-    let tags = venv.interpreter().tags()?;
-    let no_build = if args.no_build {
-        NoBuild::All
-    } else {
-        NoBuild::None
-    };
-    let config_settings = ConfigSettings::default();
-
-    let build_dispatch = BuildDispatch::new(
-        &client,
-        &cache,
-        venv.interpreter(),
-        &index_locations,
-        &flat_index,
-        &index,
-        &in_flight,
-        setup_py,
-        &config_settings,
-        BuildIsolation::Isolated,
-        &no_build,
-        &NoBinary::None,
-    );
-
-    for (idx, requirements) in requirements.chunks(100).enumerate() {
-        info!("Chunk {idx}");
-        if let Err(err) = install_chunk(
-            requirements,
-            &build_dispatch,
-            tags,
-            &client,
-            &venv,
-            &index_locations,
-        )
-        .await
-        {
-            eprintln!("💥 Chunk {idx} failed");
-            for cause in err.chain() {
-                eprintln!("  Caused by: {cause}");
-            }
-        }
-    }
-
-    Ok(())
-}
-
-async fn install_chunk(
-    requirements: &[Requirement],
-    build_dispatch: &BuildDispatch<'_>,
-    tags: &Tags,
-    client: &RegistryClient,
-    venv: &PythonEnvironment,
-    index_locations: &IndexLocations,
-) -> Result<()> {
-    let resolution: Vec<_> = DistFinder::new(
-        tags,
-        client,
-        venv.interpreter(),
-        &FlatIndex::default(),
-        &NoBinary::None,
-        &NoBuild::None,
-    )
-    .resolve_stream(requirements)
-    .collect()
-    .await;
-    let (resolution, failures): (FxHashMap<PackageName, Dist>, Vec<_>) =
-        resolution.into_iter().partition_result();
-    for failure in &failures {
-        info!("Failed to find wheel: {failure}");
-    }
-    if !failures.is_empty() {
-        info!("Failed to find {} wheel(s)", failures.len());
-    }
-    let wheels_and_source_dist = resolution.len();
-    let resolution = if build_dispatch.no_build().is_none() {
-        resolution
-    } else {
-        let only_wheels: FxHashMap<_, _> = resolution
-            .into_iter()
-            .filter(|(_, dist)| match dist {
-                Dist::Built(_) => true,
-                Dist::Source(_) => false,
-            })
-            .collect();
-        info!(
-            "Removed {} source dists",
-            wheels_and_source_dist - only_wheels.len()
-        );
-        only_wheels
-    };
-
-    let dists = Resolution::new(resolution)
-        .into_distributions()
-        .collect::<Vec<_>>();
-
-    let mut registry_index = RegistryWheelIndex::new(build_dispatch.cache(), tags, index_locations);
-    let (cached, uncached): (Vec<_>, Vec<_>) = dists.iter().partition_map(|dist| {
-        // We always want the wheel for the latest version not whatever matching is in cache.
-        let VersionOrUrl::Version(version) = dist.version_or_url() else {
-            unreachable!("Only registry distributions are supported");
-        };
-
-        if let Some(cached) = registry_index.get_version(dist.name(), version) {
-            Either::Left(CachedDist::Registry(cached.clone()))
-        } else {
-            Either::Right(dist.clone())
-        }
-    });
-    info!("Cached: {}, Uncached {}", cached.len(), uncached.len());
-
-    let downloader = Downloader::new(build_dispatch.cache(), tags, client, build_dispatch);
-    let in_flight = InFlight::default();
-    let fetches: Vec<_> = futures::stream::iter(uncached)
-        .map(|dist| downloader.get_wheel(dist, &in_flight))
-        .buffer_unordered(50)
-        .collect()
-        .await;
-    let (wheels, failures): (Vec<_>, Vec<_>) = fetches.into_iter().partition_result();
-    for failure in &failures {
-        info!("Failed to fetch wheel: {failure}");
-    }
-    if !failures.is_empty() {
-        info!("Failed to fetch {} wheel(s)", failures.len());
-    }
-
-    let wheels: Vec<_> = wheels.into_iter().chain(cached).collect();
-    uv_installer::Installer::new(venv)
-        .with_link_mode(LinkMode::default())
-        .install(&wheels)
-        .context("Failed to install")?;
-    info!("Installed {} wheels", wheels.len());
-    Ok(())
-}
@@ -21,7 +21,6 @@ use resolve_many::ResolveManyArgs;
 use crate::build::{build, BuildArgs};
 use crate::clear_compile::ClearCompileArgs;
 use crate::compile::CompileArgs;
-use crate::install_many::InstallManyArgs;
 use crate::render_benchmarks::RenderBenchmarksArgs;
 use crate::resolve_cli::ResolveCliArgs;
 use crate::wheel_metadata::WheelMetadataArgs;
@@ -45,7 +44,6 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
 mod build;
 mod clear_compile;
 mod compile;
-mod install_many;
 mod render_benchmarks;
 mod resolve_cli;
 mod resolve_many;
@@ -66,7 +64,6 @@ enum Cli {
     /// cargo run --bin uv-dev -- resolve-many scripts/popular_packages/pypi_10k_most_dependents.txt
     /// ```
     ResolveMany(ResolveManyArgs),
-    InstallMany(InstallManyArgs),
     /// Resolve requirements passed on the CLI
     Resolve(ResolveCliArgs),
     WheelMetadata(WheelMetadataArgs),
@@ -88,9 +85,6 @@ async fn run() -> Result<()> {
         Cli::ResolveMany(args) => {
             resolve_many::resolve_many(args).await?;
         }
-        Cli::InstallMany(args) => {
-            install_many::install_many(args).await?;
-        }
         Cli::Resolve(args) => {
             resolve_cli::resolve_cli(args).await?;
         }
@@ -1,298 +0,0 @@
-//! Given a set of selected packages, find a compatible set of distributions to install.
-//!
-//! This is similar to running `pip install` with the `--no-deps` flag.
-
-use anyhow::Result;
-use futures::{stream, Stream, StreamExt, TryStreamExt};
-use rustc_hash::FxHashMap;
-use uv_types::{NoBinary, NoBuild};
-
-use distribution_filename::DistFilename;
-use distribution_types::{Dist, IndexUrl, Resolution};
-use pep508_rs::{Requirement, VersionOrUrl};
-use platform_tags::{TagCompatibility, Tags};
-use uv_client::{
-    FlatDistributions, FlatIndex, OwnedArchive, RegistryClient, SimpleMetadata, SimpleMetadatum,
-};
-use uv_interpreter::Interpreter;
-use uv_normalize::PackageName;
-
-use crate::error::ResolveError;
-
-pub struct DistFinder<'a> {
-    tags: &'a Tags,
-    client: &'a RegistryClient,
-    reporter: Option<Box<dyn Reporter>>,
-    interpreter: &'a Interpreter,
-    flat_index: &'a FlatIndex,
-    no_binary: &'a NoBinary,
-    no_build: &'a NoBuild,
-}
-
-impl<'a> DistFinder<'a> {
-    /// Initialize a new distribution finder.
-    pub fn new(
-        tags: &'a Tags,
-        client: &'a RegistryClient,
-        interpreter: &'a Interpreter,
-        flat_index: &'a FlatIndex,
-        no_binary: &'a NoBinary,
-        no_build: &'a NoBuild,
-    ) -> Self {
-        Self {
-            tags,
-            client,
-            reporter: None,
-            interpreter,
-            flat_index,
-            no_binary,
-            no_build,
-        }
-    }
-
-    /// Set the [`Reporter`] to use for this resolution.
-    #[must_use]
-    pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
-        Self {
-            reporter: Some(Box::new(reporter)),
-            ..self
-        }
-    }
-
-    /// Resolve a single pinned package, either as cached network request
-    /// (version or no constraint) or by constructing a URL [`Dist`] from the
-    /// specifier URL.
-    async fn resolve_requirement(
-        &self,
-        requirement: &Requirement,
-        flat_index: Option<&FlatDistributions>,
-    ) -> Result<(PackageName, Dist), ResolveError> {
-        match requirement.version_or_url.as_ref() {
-            None | Some(VersionOrUrl::VersionSpecifier(_)) => {
-                // Query the index(es) (cached) to get the URLs for the available files.
-                let dist = match self.client.simple(&requirement.name).await {
-                    Ok((index, raw_metadata)) => {
-                        let metadata = OwnedArchive::deserialize(&raw_metadata);
-
-                        // Pick a version that satisfies the requirement.
-                        self.select_from_index(requirement, metadata, &index, flat_index)
-                    }
-                    Err(err) => match err.kind() {
-                        uv_client::ErrorKind::PackageNotFound(_)
-                        | uv_client::ErrorKind::NoIndex(_)
-                        | uv_client::ErrorKind::Offline(_) => {
-                            if let Some(flat_index) = self.flat_index.get(&requirement.name) {
-                                Self::select_from_flat_index(requirement, flat_index)
-                            } else {
-                                return Err(ResolveError::Client(err));
-                            }
-                        }
-                        _ => return Err(ResolveError::Client(err)),
-                    },
-                };
-
-                // Verify that a distribution was found.
-                let Some(dist) = dist else {
-                    return Err(ResolveError::NotFound(requirement.clone()));
-                };
-
-                if let Some(reporter) = self.reporter.as_ref() {
-                    reporter.on_progress(&dist);
-                }
-
-                let normalized_name = requirement.name.clone();
-                Ok((normalized_name, dist))
-            }
-            Some(VersionOrUrl::Url(url)) => {
-                // We have a URL; fetch the distribution directly.
-                let package_name = requirement.name.clone();
-                let package = Dist::from_url(package_name.clone(), url.clone())?;
-                Ok((package_name, package))
-            }
-        }
-    }
-
-    /// Resolve the pinned packages in parallel
-    pub fn resolve_stream<'data>(
-        &'data self,
-        requirements: &'data [Requirement],
-    ) -> impl Stream<Item = Result<(PackageName, Dist), ResolveError>> + 'data {
-        stream::iter(requirements)
-            .map(move |requirement| {
-                self.resolve_requirement(requirement, self.flat_index.get(&requirement.name))
-            })
-            .buffer_unordered(32)
-    }
-
-    /// Resolve a set of pinned packages into a set of wheels.
-    pub async fn resolve(&self, requirements: &[Requirement]) -> Result<Resolution, ResolveError> {
-        if requirements.is_empty() {
-            return Ok(Resolution::default());
-        }
-
-        let resolution: FxHashMap<PackageName, Dist> =
-            self.resolve_stream(requirements).try_collect().await?;
-
-        if let Some(reporter) = self.reporter.as_ref() {
-            reporter.on_complete();
-        }
-
-        Ok(Resolution::new(resolution))
-    }
-
-    /// Select a version that satisfies the requirement.
-    ///
-    /// Wheels are preferred to source distributions unless `no_binary` excludes wheels
-    /// for the requirement.
-    fn select_from_index(
-        &self,
-        requirement: &Requirement,
-        metadata: SimpleMetadata,
-        index: &IndexUrl,
-        flat_index: Option<&FlatDistributions>,
-    ) -> Option<Dist> {
-        let no_binary = match self.no_binary {
-            NoBinary::None => false,
-            NoBinary::All => true,
-            NoBinary::Packages(packages) => packages.contains(&requirement.name),
-        };
-        let no_build = match self.no_build {
-            NoBuild::None => false,
-            NoBuild::All => true,
-            NoBuild::Packages(packages) => packages.contains(&requirement.name),
-        };
-
-        // Prioritize the flat index by initializing the "best" matches with its entries.
-        let matching_override = if let Some(flat_index) = flat_index {
-            match &requirement.version_or_url {
-                None => flat_index.iter().next(),
-                Some(VersionOrUrl::Url(_)) => None,
-                Some(VersionOrUrl::VersionSpecifier(specifiers)) => flat_index
-                    .iter()
-                    .find(|(version, _)| specifiers.contains(version)),
-            }
-        } else {
-            None
-        };
-        let (mut best_version, mut best_wheel, mut best_sdist) =
-            if let Some((version, resolvable_dist)) = matching_override {
-                (
-                    Some(version.clone()),
-                    resolvable_dist
-                        .compatible_wheel()
-                        .map(|(dist, tag_priority)| (dist.clone(), tag_priority)),
-                    resolvable_dist.compatible_source().cloned(),
-                )
-            } else {
-                (None, None, None)
-            };
-
-        for SimpleMetadatum { version, files } in metadata.into_iter().rev() {
-            // If we iterated past the first-compatible version, break.
-            if best_version
-                .as_ref()
-                .is_some_and(|best_version| *best_version != version)
-            {
-                break;
-            }
-
-            // If the version does not satisfy the requirement, continue.
-            if !requirement.is_satisfied_by(&version) {
-                continue;
-            }
-
-            if !no_binary {
-                // Find the most-compatible wheel
-                for version_wheel in files.wheels {
-                    // Only add dists compatible with the python version.
-                    // This is relevant for source dists which give no other indication of their
-                    // compatibility and wheels which may be tagged `py3-none-any` but
-                    // have `requires-python: ">=3.9"`
-                    if !version_wheel.file.requires_python.as_ref().map_or(
-                        true,
-                        |requires_python| {
-                            requires_python.contains(self.interpreter.python_version())
-                        },
-                    ) {
-                        continue;
-                    }
-
-                    best_version = Some(version.clone());
-                    if let TagCompatibility::Compatible(priority) =
-                        version_wheel.name.compatibility(self.tags)
-                    {
-                        if best_wheel
-                            .as_ref()
-                            .map_or(true, |(.., existing)| priority > *existing)
-                        {
-                            best_wheel = Some((
-                                Dist::from_registry(
-                                    DistFilename::WheelFilename(version_wheel.name),
-                                    version_wheel.file,
-                                    index.clone(),
-                                ),
-                                priority,
-                            ));
-                        }
-                    }
-                }
-            }
-
-            // Find the most-compatible sdist, if no wheel was found.
-            if !no_build && best_wheel.is_none() {
-                for version_sdist in files.source_dists {
-                    // Only add dists compatible with the python version.
-                    // This is relevant for source dists which give no other indication of their
-                    // compatibility and wheels which may be tagged `py3-none-any` but
-                    // have `requires-python: ">=3.9"`
-                    if !version_sdist.file.requires_python.as_ref().map_or(
-                        true,
-                        |requires_python| {
-                            requires_python.contains(self.interpreter.python_version())
-                        },
-                    ) {
-                        continue;
-                    }
-
-                    best_version = Some(version_sdist.name.version.clone());
-                    best_sdist = Some(Dist::from_registry(
-                        DistFilename::SourceDistFilename(version_sdist.name),
-                        version_sdist.file,
-                        index.clone(),
-                    ));
-                }
-            }
-        }
-
-        best_wheel.map_or(best_sdist, |(wheel, ..)| Some(wheel))
-    }
-
-    /// Select a matching version from a flat index.
-    fn select_from_flat_index(
-        requirement: &Requirement,
-        flat_index: &FlatDistributions,
-    ) -> Option<Dist> {
-        let matching_override = match &requirement.version_or_url {
-            None => flat_index.iter().next(),
-            Some(VersionOrUrl::Url(_)) => None,
-            Some(VersionOrUrl::VersionSpecifier(specifiers)) => flat_index
-                .iter()
-                .find(|(version, _)| specifiers.contains(version)),
-        };
-
-        let (_, resolvable_dist) = matching_override?;
-
-        resolvable_dist.compatible_wheel().map_or_else(
-            || resolvable_dist.compatible_source().cloned(),
-            |(dist, _)| Some(dist.clone()),
-        )
-    }
-}
-
-pub trait Reporter: Send + Sync {
-    /// Callback to invoke when a package is resolved to a specific distribution.
-    fn on_progress(&self, dist: &Dist);
-
-    /// Callback to invoke when the resolution is complete.
-    fn on_complete(&self);
-}
@@ -1,6 +1,5 @@
 pub use dependency_mode::DependencyMode;
 pub use error::ResolveError;
-pub use finder::{DistFinder, Reporter as FinderReporter};
 pub use manifest::Manifest;
 pub use options::{Options, OptionsBuilder};
 pub use preferences::{Preference, PreferenceError};
@@ -23,7 +22,6 @@ mod dependency_mode;
 mod dependency_provider;
 mod editables;
 mod error;
-mod finder;
 mod manifest;
 mod options;
 mod overrides;
@@ -310,6 +310,15 @@ impl ResolutionGraph {
             .any(|index| self.petgraph[index].name() == name)
     }
 
+    /// Iterate over the [`Dist`] entities in this resolution.
+    pub fn into_distributions(self) -> impl Iterator<Item = Dist> {
+        self.petgraph
+            .into_nodes_edges()
+            .0
+            .into_iter()
+            .map(|node| node.weight)
+    }
+
     /// Return the [`Diagnostic`]s that were encountered while building the graph.
     pub fn diagnostics(&self) -> &[Diagnostic] {
         &self.diagnostics
@@ -26,13 +26,11 @@ use uv_requirements::{
     ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
     SourceTreeResolver,
 };
-use uv_resolver::InMemoryIndex;
+use uv_resolver::{DependencyMode, InMemoryIndex, Manifest, OptionsBuilder, Resolver};
 use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
 use uv_warnings::warn_user;
 
-use crate::commands::reporters::{
-    DownloadReporter, FinderReporter, InstallReporter, ResolverReporter,
-};
+use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
 use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
 use crate::printer::Printer;
 
@@ -268,16 +266,33 @@ pub(crate) async fn pip_sync(
     } else {
         let start = std::time::Instant::now();
 
-        let wheel_finder = uv_resolver::DistFinder::new(
+        // Determine the tags, markers, and interpreter to use for resolution.
+        let interpreter = venv.interpreter();
+        let tags = interpreter.tags()?;
+        let markers = interpreter.markers();
+
+        // Resolve with `--no-deps`.
+        let options = OptionsBuilder::new()
+            .dependency_mode(DependencyMode::Direct)
+            .build();
+
+        // Create a bound on the progress bar, since we know the number of packages upfront.
+        let reporter = ResolverReporter::from(printer).with_length(remote.len() as u64);
+
+        // Run the resolver.
+        let resolver = Resolver::new(
+            Manifest::simple(remote),
+            options,
+            markers,
+            interpreter,
             tags,
             &client,
-            venv.interpreter(),
             &flat_index,
-            &no_binary,
-            &no_build,
-        )
-        .with_reporter(FinderReporter::from(printer).with_length(remote.len() as u64));
-        let resolution = wheel_finder.resolve(&remote).await?;
+            &index,
+            &build_dispatch,
+        )?
+        .with_reporter(reporter);
+        let resolution = resolver.resolve().await?;
 
         let s = if resolution.len() == 1 { "" } else { "s" };
         writeln!(
@@ -6,48 +6,13 @@ use owo_colors::OwoColorize;
 use url::Url;
 
 use distribution_types::{
-    BuildableSource, CachedDist, Dist, DistributionMetadata, LocalEditable, Name, SourceDist,
+    BuildableSource, CachedDist, DistributionMetadata, LocalEditable, Name, SourceDist,
     VersionOrUrl,
 };
 use uv_normalize::PackageName;
 
 use crate::printer::Printer;
 
-#[derive(Debug)]
-pub(crate) struct FinderReporter {
-    progress: ProgressBar,
-}
-
-impl From<Printer> for FinderReporter {
-    fn from(printer: Printer) -> Self {
-        let progress = ProgressBar::with_draw_target(None, printer.target());
-        progress.set_style(
-            ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(),
-        );
-        progress.set_message("Resolving dependencies...");
-        Self { progress }
-    }
-}
-
-impl FinderReporter {
-    #[must_use]
-    pub(crate) fn with_length(self, length: u64) -> Self {
-        self.progress.set_length(length);
-        self
-    }
-}
-
-impl uv_resolver::FinderReporter for FinderReporter {
-    fn on_progress(&self, dist: &Dist) {
-        self.progress.set_message(format!("{dist}"));
-        self.progress.inc(1);
-    }
-
-    fn on_complete(&self) {
-        self.progress.finish_and_clear();
-    }
-}
-
 #[derive(Debug)]
 pub(crate) struct DownloadReporter {
     printer: Printer,
@@ -229,6 +194,12 @@ impl From<Printer> for ResolverReporter {
 }
 
 impl ResolverReporter {
+    #[must_use]
+    pub(crate) fn with_length(self, length: u64) -> Self {
+        self.progress.set_length(length);
+        self
+    }
+
     fn on_progress(&self, name: &PackageName, version_or_url: &VersionOrUrl) {
         match version_or_url {
             VersionOrUrl::Version(version) => {
@@ -584,7 +584,7 @@ fn install_git_tag() -> Result<()> {
     Resolved 1 package in [TIME]
     Downloaded 1 package in [TIME]
     Installed 1 package in [TIME]
-     + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@2.0.0)
+     + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
     "###
     );
 
@@ -844,7 +844,9 @@ fn install_no_index() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: markupsafe isn't available locally, but making network requests to registries was banned.
+    error: Because markupsafe==2.1.3 was not found in the provided package locations and you require markupsafe==2.1.3, we can conclude that the requirements are unsatisfiable.
+
+    hint: Packages were unavailable because index lookups were disabled and no additional package locations were provided (try: `--find-links <uri>`)
     "###
     );
 
@@ -894,7 +896,9 @@ fn install_no_index_cached() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: markupsafe isn't available locally, but making network requests to registries was banned.
+    error: Because markupsafe==2.1.3 was not found in the provided package locations and you require markupsafe==2.1.3, we can conclude that the requirements are unsatisfiable.
+
+    hint: Packages were unavailable because index lookups were disabled and no additional package locations were provided (try: `--find-links <uri>`)
     "###
     );
 
@@ -1098,10 +1102,8 @@ fn mismatched_name() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    Resolved 1 package in [TIME]
-    Downloaded 1 package in [TIME]
-    error: Failed to install: foo-2.0.1-py3-none-any.whl (foo==2.0.1 (from file://[TEMP_DIR]/foo-2.0.1-py3-none-any.whl))
-      Caused by: Wheel package name does not match filename: tomli != foo
+    error: Failed to read: foo @ file://[TEMP_DIR]/foo-2.0.1-py3-none-any.whl
+      Caused by: The .dist-info directory tomli-2.0.1 does not start with the normalized package name: foo
     "###
     );
 
@@ -1801,7 +1803,7 @@ fn install_url_built_dist_cached() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    Removed 2 files for tqdm ([SIZE])
+    Removed 3 files for tqdm ([SIZE])
     "###
     );
 
@@ -2561,7 +2563,9 @@ fn find_links_offline_no_match() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: Network connectivity is disabled, but the requested data wasn't found in the cache for: `numpy`
+    error: Because numpy was not found in the cache and you require numpy, we can conclude that the requirements are unsatisfiable.
+
+    hint: Packages were unavailable because the network was disabled
     "###
     );
 
@@ -2584,7 +2588,9 @@ fn offline() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: Network connectivity is disabled, but the requested data wasn't found in the cache for: `black`
+    error: Because black==23.10.1 was not found in the cache and you require black==23.10.1, we can conclude that the requirements are unsatisfiable.
+
+    hint: Packages were unavailable because the network was disabled
     "###
     );
 
@@ -2982,10 +2988,6 @@ fn no_stream() -> Result<()> {
 }
 
 /// Raise an error when a direct URL dependency's `Requires-Python` constraint is not met.
-///
-/// TODO(charlie): This currently passes, but should fail. `sync` does not currently validate the
-/// `Requires-Python` constraint for direct URL dependencies. (It _does_ respect `Requires-Python`
-/// for registry-based dependencies.)
 #[test]
 fn requires_python_direct_url() -> Result<()> {
     let context = TestContext::new("3.12");
@@ -3017,15 +3019,13 @@ requires-python = "<=3.5"
 
     uv_snapshot!(filters, command(&context)
         .arg("requirements.in"), @r###"
-    success: true
-    exit_code: 0
+    success: false
+    exit_code: 2
     ----- stdout -----
 
     ----- stderr -----
-    Resolved 1 package in [TIME]
-    Downloaded 1 package in [TIME]
-    Installed 1 package in [TIME]
-     + example==0.0.0 (from file://[TEMP_DIR])
+    error: Because the current Python version (3.12.1) does not satisfy Python<=3.5 and example==0.0.0 depends on Python<=3.5, we can conclude that example==0.0.0 cannot be used.
+    And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
     "###
     );
 