Use common routines for pip install and pip sync (#3737)

## Summary

This PR takes the functions used in `pip install`, moves them into a
common module, and then replaces all the `pip sync` logic with calls
into those functions. The net effect is that `pip install` and
`pip sync` now share far more code and behave much more consistently.

Closes https://github.com/astral-sh/uv/issues/3555.
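
For orientation, the shape of the refactor is roughly: both commands read a requirements specification, resolve it, and install the result through one shared `operations` module, differing mainly in which `Modifications` mode they pass. Below is a simplified, self-contained sketch of that structure, not uv's actual code; the names mirror the PR (`read_requirements`, `resolve`, `install`, `Modifications::Sufficient`/`Exact`), but the real functions are async and take many more parameters (caches, indexes, build dispatch, printer, and so on).

```rust
// Hypothetical, simplified illustration of the structure this PR introduces:
// two thin commands delegating to one shared `operations` module.
mod operations {
    /// How an existing environment may be modified during installation.
    #[derive(Debug, Clone, Copy)]
    pub enum Modifications {
        /// `pip install` semantics: leave unrelated, already-installed packages alone.
        Sufficient,
        /// `pip sync` semantics: remove anything not present in the resolution.
        Exact,
    }

    /// Stand-in for reading requirements from the provided sources.
    pub fn read_requirements(sources: &[&str]) -> Vec<String> {
        sources.iter().map(|s| s.to_string()).collect()
    }

    /// Stand-in for the resolver: pretend every requirement resolves to itself.
    pub fn resolve(requirements: Vec<String>) -> Vec<String> {
        requirements
    }

    /// Stand-in for applying a resolution to the environment.
    pub fn install(resolution: &[String], modifications: Modifications) {
        println!(
            "installing {} package(s) with {modifications:?} semantics",
            resolution.len()
        );
    }
}

fn pip_install(sources: &[&str]) {
    let requirements = operations::read_requirements(sources);
    let resolution = operations::resolve(requirements);
    operations::install(&resolution, operations::Modifications::Sufficient);
}

fn pip_sync(sources: &[&str]) {
    let requirements = operations::read_requirements(sources);
    let resolution = operations::resolve(requirements);
    operations::install(&resolution, operations::Modifications::Exact);
}

fn main() {
    pip_install(&["requests", "flask"]);
    pip_sync(&["requests", "flask"]);
}
```

In the actual diff, `pip install` passes `Modifications::Sufficient` (leave unrelated installed packages alone) while `pip sync` passes `Modifications::Exact` (remove anything extraneous), which is the behavioral split preserved between the two commands.
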
Charlie Marsh authored on 2024-05-22 12:15:17 -04:00 (committed by GitHub)
Commit 0313e7d78b, parent b8ef436c42
14 changed files with 1078 additions and 1031 deletions


@@ -205,4 +205,9 @@ impl RequirementSource {
            },
        }
    }
+
+    /// Returns `true` if the source is editable.
+    pub fn is_editable(&self) -> bool {
+        matches!(self, Self::Path { editable: true, .. })
+    }
}


@@ -59,16 +59,9 @@ impl Resolution {
        self.0.is_empty()
    }

-    /// Return the set of [`Requirement`]s that this resolution represents, exclusive of any
-    /// editable requirements.
+    /// Return the set of [`Requirement`]s that this resolution represents.
    pub fn requirements(&self) -> Vec<Requirement> {
-        let mut requirements: Vec<_> = self
-            .0
-            .values()
-            // Remove editable requirements
-            .filter(|dist| !dist.is_editable())
-            .map(Requirement::from)
-            .collect();
+        let mut requirements: Vec<_> = self.0.values().map(Requirement::from).collect();
        requirements.sort_unstable_by(|a, b| a.name.cmp(&b.name));
        requirements
    }


@@ -44,9 +44,10 @@ impl Reinstall {
}

/// Whether to allow package upgrades.
-#[derive(Debug, Clone)]
+#[derive(Debug, Default, Clone)]
pub enum Upgrade {
    /// Prefer pinned versions from the existing lockfile, if possible.
+    #[default]
    None,
    /// Allow package upgrades for all packages, ignoring the existing lockfile.


@@ -139,7 +139,7 @@ impl Manifest {
            // Include direct requirements, with constraints and overrides applied.
            DependencyMode::Direct => Either::Right(
-                self.overrides.apply(& self.requirements)
+                self.overrides.apply(&self.requirements)
                    .chain(self.constraints.requirements())
                    .chain(self.overrides.requirements())
                    .filter(move |requirement| requirement.evaluate_markers(markers, &[]))),

@@ -210,4 +210,9 @@ impl Manifest {
    ) -> impl Iterator<Item = &Requirement> {
        self.constraints.apply(self.overrides.apply(requirements))
    }
+
+    /// Returns the number of input requirements.
+    pub fn num_requirements(&self) -> usize {
+        self.requirements.len() + self.editables.len()
+    }
}


@@ -919,7 +919,7 @@ pub(crate) struct PipSyncArgs {
    /// WARNING: When specified, uv will select wheels that are compatible with the _target_
    /// platform; as a result, the installed distributions may not be compatible with the _current_
    /// platform. Conversely, any distributions that are built from source may be incompatible with
-    /// the the _target_ platform, as they will be built for the _current_ platform. The
+    /// the _target_ platform, as they will be built for the _current_ platform. The
    /// `--python-platform` option is intended for advanced use cases.
    #[arg(long)]
    pub(crate) python_platform: Option<TargetTriple>,

@@ -932,6 +932,18 @@ pub(crate) struct PipSyncArgs {
    #[arg(long, overrides_with("strict"), hide = true)]
    pub(crate) no_strict: bool,

+    /// Limit candidate packages to those that were uploaded prior to the given date.
+    ///
+    /// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and UTC dates in the same
+    /// format (e.g., `2006-12-02`).
+    #[arg(long)]
+    pub(crate) exclude_newer: Option<ExcludeNewer>,
+
+    /// Perform a dry run, i.e., don't actually install anything but resolve the dependencies and
+    /// print the resulting plan.
+    #[arg(long)]
+    pub(crate) dry_run: bool,
+
    #[command(flatten)]
    pub(crate) compat_args: compat::PipSyncCompatArgs,
}

@@ -1301,7 +1313,7 @@ pub(crate) struct PipInstallArgs {
    /// WARNING: When specified, uv will select wheels that are compatible with the _target_
    /// platform; as a result, the installed distributions may not be compatible with the _current_
    /// platform. Conversely, any distributions that are built from source may be incompatible with
-    /// the the _target_ platform, as they will be built for the _current_ platform. The
+    /// the _target_ platform, as they will be built for the _current_ platform. The
    /// `--python-platform` option is intended for advanced use cases.
    #[arg(long)]
    pub(crate) python_platform: Option<TargetTriple>,


@@ -2,53 +2,42 @@ use std::borrow::Cow;
use std::fmt::Write;

use anstream::eprint;
-use anyhow::{anyhow, Context, Result};
use fs_err as fs;
use itertools::Itertools;
use owo_colors::OwoColorize;
use tracing::{debug, enabled, Level};

-use distribution_types::Requirement;
-use distribution_types::{
-    DistributionMetadata, IndexLocations, InstalledMetadata, InstalledVersion, LocalDist, Name,
-    ParsedUrl, RequirementSource, Resolution,
-};
+use distribution_types::{IndexLocations, Resolution};
use install_wheel_rs::linker::LinkMode;
-use pep440_rs::{VersionSpecifier, VersionSpecifiers};
-use pep508_rs::{MarkerEnvironment, VerbatimUrl};
use platform_tags::Tags;
-use pypi_types::Yanked;
use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
-use uv_client::{
-    BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClient, RegistryClientBuilder,
-};
+use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
-    Concurrency, ConfigSettings, Constraints, IndexStrategy, NoBinary, NoBuild, Overrides,
-    PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
+    Concurrency, ConfigSettings, IndexStrategy, NoBinary, NoBuild, PreviewMode, Reinstall,
+    SetupPyStrategy, Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_fs::Simplified;
-use uv_installer::{Downloader, Plan, Planner, ResolvedEditable, SatisfiesResult, SitePackages};
-use uv_interpreter::{Interpreter, PythonEnvironment, PythonVersion, SystemPython, Target};
+use uv_installer::{SatisfiesResult, SitePackages};
+use uv_interpreter::{PythonEnvironment, PythonVersion, SystemPython, Target};
use uv_normalize::PackageName;
use uv_requirements::{
-    ExtrasSpecification, LookaheadResolver, NamedRequirementsResolver, RequirementsSource,
-    RequirementsSpecification, SourceTreeResolver,
+    ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
+    SourceTreeResolver,
};
use uv_resolver::{
-    DependencyMode, ExcludeNewer, Exclusions, FlatIndex, InMemoryIndex, Lock, Manifest, Options,
-    OptionsBuilder, PreReleaseMode, Preference, PythonRequirement, ResolutionGraph, ResolutionMode,
-    Resolver,
+    DependencyMode, ExcludeNewer, FlatIndex, InMemoryIndex, Lock, OptionsBuilder, PreReleaseMode,
+    ResolutionMode,
};
use uv_types::{BuildIsolation, HashStrategy, InFlight};
-use uv_warnings::warn_user;

-use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
-use crate::commands::DryRunEvent;
-use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
+use crate::commands::pip::operations;
+use crate::commands::pip::operations::Modifications;
+use crate::commands::reporters::ResolverReporter;
+use crate::commands::{elapsed, ExitStatus};
use crate::editables::ResolvedEditables;
use crate::printer::Printer;
@@ -91,7 +80,7 @@ pub(crate) async fn pip_install(
    cache: Cache,
    dry_run: bool,
    printer: Printer,
-) -> Result<ExitStatus> {
+) -> anyhow::Result<ExitStatus> {
    let start = std::time::Instant::now();

    let client_builder = BaseClientBuilder::new()

@@ -114,7 +103,7 @@ pub(crate) async fn pip_install(
        no_binary: specified_no_binary,
        no_build: specified_no_build,
        extras: _,
-    } = read_requirements(
+    } = operations::read_requirements(
        requirements,
        constraints,
        overrides,

@@ -420,7 +409,7 @@ pub(crate) async fn pip_install(
        .index_strategy(index_strategy)
        .build();

-    match resolve(
+    match operations::resolve(
        requirements,
        constraints,
        overrides,

@@ -444,7 +433,7 @@ pub(crate) async fn pip_install(
    .await
    {
        Ok(resolution) => Resolution::from(resolution),
-        Err(Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => {
+        Err(operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => {
            let report = miette::Report::msg(format!("{err}"))
                .context("No solution found when resolving dependencies:");
            eprint!("{report:?}");

@@ -482,10 +471,11 @@ pub(crate) async fn pip_install(
    };

    // Sync the environment.
-    install(
+    operations::install(
        &resolution,
        &editables,
        site_packages,
+        Modifications::Sufficient,
        &reinstall,
        &no_binary,
        link_mode,

@@ -506,632 +496,8 @@ pub(crate) async fn pip_install(
    // Validate the environment.
    if strict {
-        validate(&resolution, &venv, printer)?;
+        operations::validate(&resolution, &venv, printer)?;
    }

    Ok(ExitStatus::Success)
}
/// Consolidate the requirements for an installation.
async fn read_requirements(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
) -> Result<RequirementsSpecification, Error> {
// If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`),
// return an error.
if !extras.is_empty() && !requirements.iter().any(RequirementsSource::allows_extras) {
return Err(anyhow!(
"Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file."
)
.into());
}
// Read all requirements from the provided sources.
let spec = RequirementsSpecification::from_sources(
requirements,
constraints,
overrides,
extras,
client_builder,
preview,
)
.await?;
// If all the metadata could be statically resolved, validate that every extra was used. If we
// need to resolve metadata via PEP 517, we don't know which extras are used until much later.
if spec.source_trees.is_empty() {
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !spec.extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
)
.into());
}
}
}
Ok(spec)
}
/// Resolve a set of requirements, similar to running `pip compile`.
#[allow(clippy::too_many_arguments)]
async fn resolve(
requirements: Vec<Requirement>,
constraints: Vec<Requirement>,
overrides: Vec<Requirement>,
project: Option<PackageName>,
editables: &ResolvedEditables,
hasher: &HashStrategy,
site_packages: SitePackages,
reinstall: &Reinstall,
upgrade: &Upgrade,
interpreter: &Interpreter,
tags: &Tags,
markers: &MarkerEnvironment,
client: &RegistryClient,
flat_index: &FlatIndex,
index: &InMemoryIndex,
build_dispatch: &BuildDispatch<'_>,
concurrency: Concurrency,
options: Options,
printer: Printer,
) -> Result<ResolutionGraph, Error> {
let start = std::time::Instant::now();
// TODO(zanieb): Consider consuming these instead of cloning
let exclusions = Exclusions::new(reinstall.clone(), upgrade.clone());
// Prefer current site packages; filter out packages that are marked for reinstall or upgrade
let preferences = site_packages
.iter()
.filter(|dist| !exclusions.contains(dist.name()))
.map(|dist| {
let source = match dist.installed_version() {
InstalledVersion::Version(version) => RequirementSource::Registry {
specifier: VersionSpecifiers::from(VersionSpecifier::equals_version(
version.clone(),
)),
// TODO(konstin): track index
index: None,
},
InstalledVersion::Url(url, _version) => {
let parsed_url = ParsedUrl::try_from(url.clone())?;
RequirementSource::from_parsed_url(
parsed_url,
VerbatimUrl::from_url(url.clone()),
)
}
};
let requirement = Requirement {
name: dist.name().clone(),
extras: vec![],
marker: None,
source,
origin: None,
};
Ok(Preference::from_requirement(requirement))
})
.collect::<Result<_, _>>()
.map_err(Error::UnsupportedInstalledDist)?;
// Collect constraints and overrides.
let constraints = Constraints::from_requirements(constraints);
let overrides = Overrides::from_requirements(overrides);
let python_requirement = PythonRequirement::from_marker_environment(interpreter, markers);
// Map the editables to their metadata.
let editables = editables.as_metadata().map_err(Error::ParsedUrl)?;
// Determine any lookahead requirements.
let lookaheads = match options.dependency_mode {
DependencyMode::Transitive => {
LookaheadResolver::new(
&requirements,
&constraints,
&overrides,
&editables,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(Some(markers))
.await?
}
DependencyMode::Direct => Vec::new(),
};
// Create a manifest of the requirements.
let manifest = Manifest::new(
requirements,
constraints,
overrides,
preferences,
project,
editables,
exclusions,
lookaheads,
);
// Resolve the dependencies.
let resolver = Resolver::new(
manifest,
options,
&python_requirement,
Some(markers),
tags,
flat_index,
index,
hasher,
build_dispatch,
site_packages,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)?
.with_reporter(ResolverReporter::from(printer));
let resolution = resolver.resolve().await?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
// Notify the user of any diagnostics.
for diagnostic in resolution.diagnostics() {
writeln!(
printer.stderr(),
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
Ok(resolution)
}
/// Install a set of requirements into the current environment.
#[allow(clippy::too_many_arguments)]
async fn install(
resolution: &Resolution,
editables: &[ResolvedEditable],
site_packages: SitePackages,
reinstall: &Reinstall,
no_binary: &NoBinary,
link_mode: LinkMode,
compile: bool,
index_urls: &IndexLocations,
hasher: &HashStrategy,
tags: &Tags,
client: &RegistryClient,
in_flight: &InFlight,
concurrency: Concurrency,
build_dispatch: &BuildDispatch<'_>,
cache: &Cache,
venv: &PythonEnvironment,
dry_run: bool,
printer: Printer,
) -> Result<(), Error> {
let start = std::time::Instant::now();
let requirements = resolution.requirements();
// Partition into those that should be linked from the cache (`local`), those that need to be
// downloaded (`remote`), and those that should be removed (`extraneous`).
let plan = Planner::with_requirements(&requirements)
.with_editable_requirements(editables)
.build(
site_packages,
reinstall,
no_binary,
hasher,
index_urls,
cache,
venv,
tags,
)
.context("Failed to determine installation plan")?;
if dry_run {
return report_dry_run(resolution, plan, start, printer);
}
let Plan {
cached,
remote,
reinstalls,
extraneous: _,
} = plan;
// Nothing to do.
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(());
}
// Map any registry-based requirements back to those returned by the resolver.
let remote = remote
.iter()
.map(|dist| {
resolution
.get_remote(&dist.name)
.cloned()
.expect("Resolution should contain all packages")
})
.collect::<Vec<_>>();
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let start = std::time::Instant::now();
let downloader = Downloader::new(
cache,
tags,
hasher,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader
.download(remote.clone(), in_flight)
.await
.context("Failed to download distributions")?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Downloaded {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
wheels
};
// Remove any existing installations.
if !reinstalls.is_empty() {
for dist_info in &reinstalls {
match uv_installer::uninstall(dist_info).await {
Ok(summary) => {
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
Err(uv_installer::UninstallError::Uninstall(
install_wheel_rs::Error::MissingRecord(_),
)) => {
warn_user!(
"Failed to uninstall package at {} due to missing RECORD file. Installation may result in an incomplete environment.",
dist_info.path().user_display().cyan(),
);
}
Err(err) => return Err(err.into()),
}
}
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
uv_installer::Installer::new(venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
if compile {
compile_bytecode(venv, cache, printer).await?;
}
for event in reinstalls
.into_iter()
.map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
.then_with(|| a.dist.installed_version().cmp(&b.dist.installed_version()))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer.stderr(),
" {} {}{}",
"+".green(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer.stderr(),
" {} {}{}",
"-".red(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
}
}
// TODO(konstin): Also check the cache whether any cached or installed dist is already known to
// have been yanked, we currently don't show this message on the second run anymore
for dist in &remote {
let Some(file) = dist.file() else {
continue;
};
match &file.yanked {
None | Some(Yanked::Bool(false)) => {}
Some(Yanked::Bool(true)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked.",
"warning".yellow().bold(),
":".bold(),
)?;
}
Some(Yanked::Reason(reason)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked (reason: \"{reason}\").",
"warning".yellow().bold(),
":".bold(),
)?;
}
}
}
Ok(())
}
/// Report on the results of a dry-run installation.
fn report_dry_run(
resolution: &Resolution,
plan: Plan,
start: std::time::Instant,
printer: Printer,
) -> Result<(), Error> {
let Plan {
cached,
remote,
reinstalls,
extraneous: _,
} = plan;
// Nothing to do.
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
writeln!(printer.stderr(), "Would make no changes")?;
return Ok(());
}
// Map any registry-based requirements back to those returned by the resolver.
let remote = remote
.iter()
.map(|dist| {
resolution
.get_remote(&dist.name)
.cloned()
.expect("Resolution should contain all packages")
})
.collect::<Vec<_>>();
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let s = if remote.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Would download {}",
format!("{} package{}", remote.len(), s).bold(),
)
.dimmed()
)?;
remote
};
// Remove any existing installations.
if !reinstalls.is_empty() {
let s = if reinstalls.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Would uninstall {}",
format!("{} package{}", reinstalls.len(), s).bold(),
)
.dimmed()
)?;
}
// Install the resolved distributions.
let installs = wheels.len() + cached.len();
if installs > 0 {
let s = if installs == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!("Would install {}", format!("{installs} package{s}").bold()).dimmed()
)?;
}
for event in reinstalls
.into_iter()
.map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.installed_version().to_string(),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.version_or_url().to_string(),
kind: ChangeEventKind::Added,
}))
.chain(cached.into_iter().map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.installed_version().to_string(),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| a.name.cmp(&b.name).then_with(|| a.kind.cmp(&b.kind)))
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer.stderr(),
" {} {}{}",
"+".green(),
event.name.as_ref().bold(),
event.version.dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer.stderr(),
" {} {}{}",
"-".red(),
event.name.as_ref().bold(),
event.version.dimmed()
)?;
}
}
}
Ok(())
}
/// Validate the installed packages in the virtual environment.
fn validate(
resolution: &Resolution,
venv: &PythonEnvironment,
printer: Printer,
) -> Result<(), Error> {
let site_packages = SitePackages::from_executable(venv)?;
let diagnostics = site_packages.diagnostics()?;
for diagnostic in diagnostics {
// Only surface diagnostics that are "relevant" to the current resolution.
if resolution
.packages()
.any(|package| diagnostic.includes(package))
{
writeln!(
printer.stderr(),
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
Ok(())
}
#[derive(thiserror::Error, Debug)]
enum Error {
#[error(transparent)]
Resolve(#[from] uv_resolver::ResolveError),
#[error(transparent)]
Uninstall(#[from] uv_installer::UninstallError),
#[error(transparent)]
Client(#[from] uv_client::Error),
#[error(transparent)]
Platform(#[from] platform_tags::PlatformError),
#[error(transparent)]
Hash(#[from] uv_types::HashStrategyError),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
Fmt(#[from] std::fmt::Error),
#[error(transparent)]
Lookahead(#[from] uv_requirements::LookaheadError),
#[error(transparent)]
ParsedUrl(Box<distribution_types::ParsedUrlError>),
#[error(transparent)]
Anyhow(#[from] anyhow::Error),
#[error("Installed distribution has unsupported type")]
UnsupportedInstalledDist(#[source] Box<distribution_types::ParsedUrlError>),
}


@@ -3,6 +3,7 @@ pub(crate) mod compile;
pub(crate) mod freeze;
pub(crate) mod install;
pub(crate) mod list;
+mod operations;
pub(crate) mod show;
pub(crate) mod sync;
pub(crate) mod uninstall;


@@ -0,0 +1,741 @@
//! Common operations shared across the `pip` API and subcommands.
use std::fmt::Write;
use anyhow::{anyhow, Context};
use itertools::Itertools;
use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::Requirement;
use distribution_types::{
DistributionMetadata, IndexLocations, InstalledMetadata, InstalledVersion, LocalDist, Name,
ParsedUrl, RequirementSource, Resolution,
};
use install_wheel_rs::linker::LinkMode;
use pep440_rs::{VersionSpecifier, VersionSpecifiers};
use pep508_rs::{MarkerEnvironment, VerbatimUrl};
use platform_tags::Tags;
use pypi_types::Yanked;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, RegistryClient};
use uv_configuration::{
Concurrency, Constraints, NoBinary, Overrides, PreviewMode, Reinstall, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_fs::Simplified;
use uv_installer::{Downloader, Plan, Planner, ResolvedEditable, SitePackages};
use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_normalize::PackageName;
use uv_requirements::{
ExtrasSpecification, LookaheadResolver, RequirementsSource, RequirementsSpecification,
};
use uv_resolver::{
DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference,
PythonRequirement, ResolutionGraph, Resolver,
};
use uv_types::{HashStrategy, InFlight};
use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
use crate::commands::DryRunEvent;
use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind};
use crate::editables::ResolvedEditables;
use crate::printer::Printer;
/// Consolidate the requirements for an installation.
pub(crate) async fn read_requirements(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
) -> Result<RequirementsSpecification, Error> {
// If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`),
// return an error.
if !extras.is_empty() && !requirements.iter().any(RequirementsSource::allows_extras) {
return Err(anyhow!(
"Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file."
)
.into());
}
// Read all requirements from the provided sources.
let spec = RequirementsSpecification::from_sources(
requirements,
constraints,
overrides,
extras,
client_builder,
preview,
)
.await?;
// If all the metadata could be statically resolved, validate that every extra was used. If we
// need to resolve metadata via PEP 517, we don't know which extras are used until much later.
if spec.source_trees.is_empty() {
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !spec.extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
)
.into());
}
}
}
Ok(spec)
}
/// Resolve a set of requirements, similar to running `pip compile`.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn resolve(
requirements: Vec<Requirement>,
constraints: Vec<Requirement>,
overrides: Vec<Requirement>,
project: Option<PackageName>,
editables: &ResolvedEditables,
hasher: &HashStrategy,
site_packages: SitePackages,
reinstall: &Reinstall,
upgrade: &Upgrade,
interpreter: &Interpreter,
tags: &Tags,
markers: &MarkerEnvironment,
client: &RegistryClient,
flat_index: &FlatIndex,
index: &InMemoryIndex,
build_dispatch: &BuildDispatch<'_>,
concurrency: Concurrency,
options: Options,
printer: Printer,
) -> Result<ResolutionGraph, Error> {
let start = std::time::Instant::now();
// TODO(zanieb): Consider consuming these instead of cloning
let exclusions = Exclusions::new(reinstall.clone(), upgrade.clone());
// Prefer current site packages; filter out packages that are marked for reinstall or upgrade
let preferences = site_packages
.iter()
.filter(|dist| !exclusions.contains(dist.name()))
.map(|dist| {
let source = match dist.installed_version() {
InstalledVersion::Version(version) => RequirementSource::Registry {
specifier: VersionSpecifiers::from(VersionSpecifier::equals_version(
version.clone(),
)),
// TODO(konstin): track index
index: None,
},
InstalledVersion::Url(url, _version) => {
let parsed_url = ParsedUrl::try_from(url.clone())?;
RequirementSource::from_parsed_url(
parsed_url,
VerbatimUrl::from_url(url.clone()),
)
}
};
let requirement = Requirement {
name: dist.name().clone(),
extras: vec![],
marker: None,
source,
origin: None,
};
Ok(Preference::from_requirement(requirement))
})
.collect::<Result<_, _>>()
.map_err(Error::UnsupportedInstalledDist)?;
// Collect constraints and overrides.
let constraints = Constraints::from_requirements(constraints);
let overrides = Overrides::from_requirements(overrides);
let python_requirement = PythonRequirement::from_marker_environment(interpreter, markers);
// Map the editables to their metadata.
let editables = editables.as_metadata().map_err(Error::ParsedUrl)?;
// Determine any lookahead requirements.
let lookaheads = match options.dependency_mode {
DependencyMode::Transitive => {
LookaheadResolver::new(
&requirements,
&constraints,
&overrides,
&editables,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(Some(markers))
.await?
}
DependencyMode::Direct => Vec::new(),
};
// Create a manifest of the requirements.
let manifest = Manifest::new(
requirements,
constraints,
overrides,
preferences,
project,
editables,
exclusions,
lookaheads,
);
// Resolve the dependencies.
let resolution = {
// If possible, create a bound on the progress bar.
let reporter = match options.dependency_mode {
DependencyMode::Transitive => ResolverReporter::from(printer),
DependencyMode::Direct => {
ResolverReporter::from(printer).with_length(manifest.num_requirements() as u64)
}
};
let resolver = Resolver::new(
manifest,
options,
&python_requirement,
Some(markers),
tags,
flat_index,
index,
hasher,
build_dispatch,
site_packages,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)?
.with_reporter(reporter);
resolver.resolve().await?
};
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
// Notify the user of any diagnostics.
for diagnostic in resolution.diagnostics() {
writeln!(
printer.stderr(),
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
Ok(resolution)
}
#[derive(Debug, Clone, Copy)]
pub(crate) enum Modifications {
/// Use `pip install` semantics, whereby existing installations are left as-is, unless they are
/// marked for re-installation or upgrade.
///
/// Ensures that the resulting environment is sufficient to meet the requirements, but without
/// any unnecessary changes.
Sufficient,
/// Use `pip sync` semantics, whereby any existing, extraneous installations are removed.
///
/// Ensures that the resulting environment is an exact match for the requirements, but may
/// result in more changes than necessary.
Exact,
}
/// Install a set of requirements into the current environment.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn install(
resolution: &Resolution,
editables: &[ResolvedEditable],
site_packages: SitePackages,
modifications: Modifications,
reinstall: &Reinstall,
no_binary: &NoBinary,
link_mode: LinkMode,
compile: bool,
index_urls: &IndexLocations,
hasher: &HashStrategy,
tags: &Tags,
client: &RegistryClient,
in_flight: &InFlight,
concurrency: Concurrency,
build_dispatch: &BuildDispatch<'_>,
cache: &Cache,
venv: &PythonEnvironment,
dry_run: bool,
printer: Printer,
) -> Result<(), Error> {
let start = std::time::Instant::now();
// Extract the requirements from the resolution, filtering out any editables that were already
// required. If a package is already installed as editable, it may appear in the resolution
// despite not being explicitly requested.
let requirements = resolution
.requirements()
.into_iter()
.filter(|requirement| {
if requirement.source.is_editable() {
!editables
.iter()
.any(|editable| requirement.name == *editable.name())
} else {
true
}
})
.collect::<Vec<_>>();
// Partition into those that should be linked from the cache (`local`), those that need to be
// downloaded (`remote`), and those that should be removed (`extraneous`).
let plan = Planner::with_requirements(&requirements)
.with_editable_requirements(editables)
.build(
site_packages,
reinstall,
no_binary,
hasher,
index_urls,
cache,
venv,
tags,
)
.context("Failed to determine installation plan")?;
if dry_run {
return report_dry_run(resolution, plan, modifications, start, printer);
}
let Plan {
cached,
remote,
reinstalls,
extraneous,
} = plan;
// If we're in `install` mode, ignore any extraneous distributions.
let extraneous = match modifications {
Modifications::Sufficient => vec![],
Modifications::Exact => extraneous,
};
// Nothing to do.
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
return Ok(());
}
// Map any registry-based requirements back to those returned by the resolver.
let remote = remote
.iter()
.map(|dist| {
resolution
.get_remote(&dist.name)
.cloned()
.expect("Resolution should contain all packages")
})
.collect::<Vec<_>>();
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let start = std::time::Instant::now();
let downloader = Downloader::new(
cache,
tags,
hasher,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
let wheels = downloader
.download(remote.clone(), in_flight)
.await
.context("Failed to download distributions")?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Downloaded {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
wheels
};
// Remove any upgraded or extraneous installations.
if !extraneous.is_empty() || !reinstalls.is_empty() {
let start = std::time::Instant::now();
for dist_info in extraneous.iter().chain(reinstalls.iter()) {
match uv_installer::uninstall(dist_info).await {
Ok(summary) => {
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
Err(uv_installer::UninstallError::Uninstall(
install_wheel_rs::Error::MissingRecord(_),
)) => {
warn_user!(
"Failed to uninstall package at {} due to missing RECORD file. Installation may result in an incomplete environment.",
dist_info.path().user_display().cyan(),
);
}
Err(err) => return Err(err.into()),
}
}
let s = if extraneous.len() + reinstalls.len() == 1 {
""
} else {
"s"
};
writeln!(
printer.stderr(),
"{}",
format!(
"Uninstalled {} in {}",
format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
uv_installer::Installer::new(venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
if compile {
compile_bytecode(venv, cache, printer).await?;
}
for event in extraneous
.into_iter()
.chain(reinstalls.into_iter())
.map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
.then_with(|| a.dist.installed_version().cmp(&b.dist.installed_version()))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer.stderr(),
" {} {}{}",
"+".green(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer.stderr(),
" {} {}{}",
"-".red(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
}
}
// TODO(konstin): Also check the cache whether any cached or installed dist is already known to
// have been yanked, we currently don't show this message on the second run anymore
for dist in &remote {
let Some(file) = dist.file() else {
continue;
};
match &file.yanked {
None | Some(Yanked::Bool(false)) => {}
Some(Yanked::Bool(true)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked.",
"warning".yellow().bold(),
":".bold(),
)?;
}
Some(Yanked::Reason(reason)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked (reason: \"{reason}\").",
"warning".yellow().bold(),
":".bold(),
)?;
}
}
}
Ok(())
}
/// Report on the results of a dry-run installation.
fn report_dry_run(
resolution: &Resolution,
plan: Plan,
modifications: Modifications,
start: std::time::Instant,
printer: Printer,
) -> Result<(), Error> {
let Plan {
cached,
remote,
reinstalls,
extraneous,
} = plan;
// If we're in `install` mode, ignore any extraneous distributions.
let extraneous = match modifications {
Modifications::Sufficient => vec![],
Modifications::Exact => extraneous,
};
// Nothing to do.
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Audited {} in {}",
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
writeln!(printer.stderr(), "Would make no changes")?;
return Ok(());
}
// Map any registry-based requirements back to those returned by the resolver.
let remote = remote
.iter()
.map(|dist| {
resolution
.get_remote(&dist.name)
.cloned()
.expect("Resolution should contain all packages")
})
.collect::<Vec<_>>();
// Download, build, and unzip any missing distributions.
let wheels = if remote.is_empty() {
vec![]
} else {
let s = if remote.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Would download {}",
format!("{} package{}", remote.len(), s).bold(),
)
.dimmed()
)?;
remote
};
// Remove any upgraded or extraneous installations.
if !extraneous.is_empty() || !reinstalls.is_empty() {
let s = if extraneous.len() + reinstalls.len() == 1 {
""
} else {
"s"
};
writeln!(
printer.stderr(),
"{}",
format!(
"Would uninstall {}",
format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(),
)
.dimmed()
)?;
}
// Install the resolved distributions.
let installs = wheels.len() + cached.len();
if installs > 0 {
let s = if installs == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!("Would install {}", format!("{installs} package{s}").bold()).dimmed()
)?;
}
for event in extraneous
.into_iter()
.chain(reinstalls.into_iter())
.map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.installed_version().to_string(),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.version_or_url().to_string(),
kind: ChangeEventKind::Added,
}))
.chain(cached.into_iter().map(|distribution| DryRunEvent {
name: distribution.name().clone(),
version: distribution.installed_version().to_string(),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| a.name.cmp(&b.name).then_with(|| a.kind.cmp(&b.kind)))
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer.stderr(),
" {} {}{}",
"+".green(),
event.name.as_ref().bold(),
event.version.dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer.stderr(),
" {} {}{}",
"-".red(),
event.name.as_ref().bold(),
event.version.dimmed()
)?;
}
}
}
Ok(())
}
/// Validate the installed packages in the virtual environment.
pub(crate) fn validate(
resolution: &Resolution,
venv: &PythonEnvironment,
printer: Printer,
) -> Result<(), Error> {
let site_packages = SitePackages::from_executable(venv)?;
for diagnostic in site_packages.diagnostics()? {
// Only surface diagnostics that are "relevant" to the current resolution.
if resolution
.packages()
.any(|package| diagnostic.includes(package))
{
writeln!(
printer.stderr(),
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
Ok(())
}
#[derive(thiserror::Error, Debug)]
pub(crate) enum Error {
#[error(transparent)]
Resolve(#[from] uv_resolver::ResolveError),
#[error(transparent)]
Uninstall(#[from] uv_installer::UninstallError),
#[error(transparent)]
Hash(#[from] uv_types::HashStrategyError),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
Fmt(#[from] std::fmt::Error),
#[error(transparent)]
Lookahead(#[from] uv_requirements::LookaheadError),
#[error(transparent)]
ParsedUrl(Box<distribution_types::ParsedUrlError>),
#[error(transparent)]
Anyhow(#[from] anyhow::Error),
#[error("Installed distribution has unsupported type")]
UnsupportedInstalledDist(#[source] Box<distribution_types::ParsedUrlError>),
}


@@ -2,40 +2,40 @@ use std::borrow::Cow;
use std::fmt::Write;

use anstream::eprint;
-use anyhow::{Context, Result};
-use itertools::Itertools;
+use anyhow::Result;
use owo_colors::OwoColorize;
use tracing::debug;

-use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, Name, ResolvedDist};
+use distribution_types::{IndexLocations, Resolution};
use install_wheel_rs::linker::LinkMode;
use platform_tags::Tags;
-use pypi_types::Yanked;
use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
    Concurrency, ConfigSettings, IndexStrategy, NoBinary, NoBuild, PreviewMode, Reinstall,
-    SetupPyStrategy,
+    SetupPyStrategy, Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_fs::Simplified;
-use uv_installer::{Downloader, Plan, Planner, SitePackages};
+use uv_installer::SitePackages;
use uv_interpreter::{PythonEnvironment, PythonVersion, SystemPython, Target};
use uv_requirements::{
    ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
    SourceTreeResolver,
};
use uv_resolver::{
-    DependencyMode, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver,
+    DependencyMode, ExcludeNewer, FlatIndex, InMemoryIndex, OptionsBuilder, PreReleaseMode,
+    ResolutionMode,
};
-use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
-use uv_warnings::warn_user;
+use uv_types::{BuildIsolation, HashStrategy, InFlight};

-use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
-use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, ExitStatus};
+use crate::commands::pip::operations;
+use crate::commands::pip::operations::Modifications;
+use crate::commands::reporters::ResolverReporter;
+use crate::commands::ExitStatus;
use crate::editables::ResolvedEditables;
use crate::printer::Printer;
@@ -59,6 +59,7 @@ pub(crate) async fn pip_sync(
    python_version: Option<PythonVersion>,
    python_platform: Option<TargetTriple>,
    strict: bool,
+    exclude_newer: Option<ExcludeNewer>,
    python: Option<String>,
    system: bool,
    break_system_packages: bool,

@@ -67,31 +68,47 @@ pub(crate) async fn pip_sync(
    native_tls: bool,
    preview: PreviewMode,
    cache: Cache,
+    dry_run: bool,
    printer: Printer,
) -> Result<ExitStatus> {
-    let start = std::time::Instant::now();

    let client_builder = BaseClientBuilder::new()
        .connectivity(connectivity)
        .native_tls(native_tls)
        .keyring(keyring_provider);

+    // Initialize a few defaults.
+    let constraints = &[];
+    let overrides = &[];
+    let extras = ExtrasSpecification::default();
+    let upgrade = Upgrade::default();
+    let resolution_mode = ResolutionMode::default();
+    let prerelease_mode = PreReleaseMode::default();
+    let dependency_mode = DependencyMode::Direct;
+
    // Read all requirements from the provided sources.
    let RequirementsSpecification {
-        project: _,
+        project,
        requirements,
-        constraints: _,
-        overrides: _,
+        constraints,
+        overrides,
        editables,
        source_trees,
-        extras: _,
        index_url,
        extra_index_urls,
        no_index,
        find_links,
        no_binary: specified_no_binary,
        no_build: specified_no_build,
-    } = RequirementsSpecification::from_simple_sources(sources, &client_builder, preview).await?;
+        extras: _,
+    } = operations::read_requirements(
+        sources,
+        constraints,
+        overrides,
+        &ExtrasSpecification::default(),
+        &client_builder,
+        preview,
+    )
+    .await?;
    // Validate that the requirements are non-empty.
    let num_requirements = requirements.len() + source_trees.len() + editables.len();

@@ -214,8 +231,8 @@ pub(crate) async fn pip_sync(
        .index_urls(index_locations.index_urls())
        .index_strategy(index_strategy)
        .keyring(keyring_provider)
-        .markers(venv.interpreter().markers())
-        .platform(venv.interpreter().platform())
+        .markers(&markers)
+        .platform(interpreter.platform())
        .build();

    // Resolve the flat indexes from `--find-links`.

@@ -225,12 +242,6 @@ pub(crate) async fn pip_sync(
        FlatIndex::from_entries(entries, &tags, &hasher, &no_build, &no_binary)
    };

-    // Create a shared in-memory index.
-    let index = InMemoryIndex::default();
-    // Track in-flight downloads, builds, etc., across resolutions.
-    let in_flight = InFlight::default();

    // Determine whether to enable build isolation.
    let build_isolation = if no_build_isolation {
        BuildIsolation::Shared(&venv)

@@ -242,14 +253,17 @@ pub(crate) async fn pip_sync(
    let no_binary = no_binary.combine(specified_no_binary);
    let no_build = no_build.combine(specified_no_build);

-    // Determine the set of installed packages.
-    let site_packages = SitePackages::from_executable(&venv)?;
-    // Prep the build context.
-    let build_dispatch = BuildDispatch::new(
+    // Create a shared in-memory index.
+    let index = InMemoryIndex::default();
+
+    // Track in-flight downloads, builds, etc., across resolutions.
+    let in_flight = InFlight::default();
+
+    // Create a build dispatch for resolution.
+    let resolve_dispatch = BuildDispatch::new(
        &client,
        &cache,
-        venv.interpreter(),
+        interpreter,
        &index_locations,
        &flat_index,
        &index,

@@ -261,41 +275,14 @@ pub(crate) async fn pip_sync(
        &no_build,
        &no_binary,
        concurrency,
-    );
+    )
+    .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());

-    // Convert from unnamed to named requirements.
-    let requirements = {
-        // Convert from unnamed to named requirements.
-        let mut requirements = NamedRequirementsResolver::new(
-            requirements,
-            &hasher,
-            &index,
-            DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
-        )
-        .with_reporter(ResolverReporter::from(printer))
-        .resolve()
-        .await?;
-        // Resolve any source trees into requirements.
-        if !source_trees.is_empty() {
-            requirements.extend(
-                SourceTreeResolver::new(
-                    source_trees,
-                    &ExtrasSpecification::None,
-                    &hasher,
-                    &index,
-                    DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
-                )
-                .with_reporter(ResolverReporter::from(printer))
-                .resolve()
-                .await?,
-            );
-        }
-        requirements
-    };
-    // Resolve any editables.
+    // Determine the set of installed packages.
+    let site_packages = SitePackages::from_executable(&venv)?;
+
+    // Build all editable distributions. The editables are shared between resolution and
+    // installation, and should live for the duration of the command.
    let editables = ResolvedEditables::resolve(
        editables
            .into_iter()

@@ -307,304 +294,150 @@ pub(crate) async fn pip_sync(
        &tags,
        &cache,
        &client,
-        &build_dispatch,
+        &resolve_dispatch,
        concurrency,
        printer,
    )
    .await?;

-    // Partition into those that should be linked from the cache (`cached`), those that need to be
-    // downloaded (`remote`), and those that should be removed (`extraneous`).
-    let Plan {
-        cached,
-        remote,
-        reinstalls,
-        extraneous,
-    } = Planner::with_requirements(&requirements)
-        .with_editable_requirements(&editables)
-        .build(
-            site_packages,
-            reinstall,
-            &no_binary,
-            &hasher,
-            &index_locations,
-            &cache,
-            &venv,
-            &tags,
-        )
-        .context("Failed to determine installation plan")?;

-    // Nothing to do.
-    if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
-        let s = if num_requirements == 1 { "" } else { "s" };
-        writeln!(
-            printer.stderr(),
-            "{}",
-            format!(
-                "Audited {} in {}",
-                format!("{num_requirements} package{s}").bold(),
-                elapsed(start.elapsed())
-            )
-            .dimmed()
-        )?;
-        return Ok(ExitStatus::Success);
-    }

-    // Resolve any registry-based requirements.
-    let remote = if remote.is_empty() {
-        Vec::new()
-    } else {
-        let start = std::time::Instant::now();
-        // Determine the tags, markers, and interpreter to use for resolution.
-        let interpreter = venv.interpreter();
-        let tags = interpreter.tags()?;
-        let markers = interpreter.markers();
-        let python_requirement = PythonRequirement::from_marker_environment(interpreter, markers);
-        // Resolve with `--no-deps`.
-        let options = OptionsBuilder::new()
-            .dependency_mode(DependencyMode::Direct)
-            .build();
-        // Create a bound on the progress bar, since we know the number of packages upfront.
-        let reporter = ResolverReporter::from(printer).with_length(remote.len() as u64);
-        // Run the resolver.
-        let resolver = Resolver::new(
-            Manifest::simple(remote),
-            options,
-            &python_requirement,
-            Some(markers),
-            tags,
-            &flat_index,
-            &index,
-            &hasher,
-            &build_dispatch,
-            // TODO(zanieb): We should consider support for installed packages in pip sync
-            EmptyInstalledPackages,
-            DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
-        )?
-        .with_reporter(reporter);
-        let resolution = match resolver.resolve().await {
-            Err(uv_resolver::ResolveError::NoSolution(err)) => {
-                let report = miette::Report::msg(format!("{err}"))
-                    .context("No solution found when resolving dependencies:");
-                eprint!("{report:?}");
-                return Ok(ExitStatus::Failure);
-            }
-            result => result,
-        }?;
-        let s = if resolution.len() == 1 { "" } else { "s" };
-        writeln!(
-            printer.stderr(),
-            "{}",
-            format!(
-                "Resolved {} in {}",
-                format!("{} package{}", resolution.len(), s).bold(),
-                elapsed(start.elapsed())
-            )
-            .dimmed()
-        )?;
-        resolution
-            .into_distributions()
-            .filter_map(|dist| match dist {
-                ResolvedDist::Installable(dist) => Some(dist),
-                ResolvedDist::Installed(_) => None,
-            })
-            .collect::<Vec<_>>()
-    };

-    // Download, build, and unzip any missing distributions.
-    let wheels = if remote.is_empty() {
-        Vec::new()
-    } else {
-        let start = std::time::Instant::now();
-        let downloader = Downloader::new(
-            &cache,
-            &tags,
-            &hasher,
-            DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
-        )
-        .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
-        let wheels = downloader
-            .download(remote.clone(), &in_flight)
-            .await
-            .context("Failed to download distributions")?;
-        let s = if wheels.len() == 1 { "" } else { "s" };
-        writeln!(
-            printer.stderr(),
-            "{}",
-            format!(
-                "Downloaded {} in {}",
-                format!("{} package{}", wheels.len(), s).bold(),
-                elapsed(start.elapsed())
-            )
-            .dimmed()
-        )?;
-        wheels
-    };

+    // Resolve the requirements from the provided sources.
+    let requirements = {
+        // Convert from unnamed to named requirements.
+        let mut requirements = NamedRequirementsResolver::new(
+            requirements,
+            &hasher,
+            &index,
+            DistributionDatabase::new(&client, &resolve_dispatch, concurrency.downloads),
+        )
+        .with_reporter(ResolverReporter::from(printer))
+        .resolve()
+        .await?;
+
+        // Resolve any source trees into requirements.
+        if !source_trees.is_empty() {
+            requirements.extend(
+                SourceTreeResolver::new(
+                    source_trees,
+                    &extras,
+                    &hasher,
+                    &index,
+                    DistributionDatabase::new(&client, &resolve_dispatch, concurrency.downloads),
+                )
+                .with_reporter(ResolverReporter::from(printer))
+                .resolve()
+                .await?,
+            );
+        }
+
+        requirements
+    };
+
+    // Resolve the overrides from the provided sources.
+    let overrides = NamedRequirementsResolver::new(
+        overrides,
+        &hasher,
+        &index,
+        DistributionDatabase::new(&client, &resolve_dispatch, concurrency.downloads),
+    )
+    .with_reporter(ResolverReporter::from(printer))
+    .resolve()
+    .await?;
+
+    let options = OptionsBuilder::new()
+        .resolution_mode(resolution_mode)
+        .prerelease_mode(prerelease_mode)
+        .dependency_mode(dependency_mode)
+        .exclude_newer(exclude_newer)
+        .index_strategy(index_strategy)
+        .build();
+
+    let resolution = match operations::resolve(
+        requirements,
+        constraints,
+        overrides,
+        project,
+        &editables,
+        &hasher,
+        site_packages.clone(),
+        reinstall,
+        &upgrade,
+        interpreter,
+        &tags,
+        &markers,
+        &client,
+        &flat_index,
+        &index,
+        &resolve_dispatch,
+        concurrency,
+        options,
+        printer,
+    )
+    .await
+    {
+        Ok(resolution) => Resolution::from(resolution),
+        Err(operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => {
+            let report = miette::Report::msg(format!("{err}"))
+                .context("No solution found when resolving dependencies:");
+            eprint!("{report:?}");
+            return Ok(ExitStatus::Failure);
+        }
+        Err(err) => return Err(err.into()),
+    };
+
+    // Re-initialize the in-flight map.
+    let in_flight = InFlight::default();
+
+    // If we're running with `--reinstall`, initialize a separate `BuildDispatch`, since we may
+    // end up removing some distributions from the environment.
+    let install_dispatch = if reinstall.is_none() {
+        resolve_dispatch
+    } else {
+        BuildDispatch::new(
+            &client,
+            &cache,
+            interpreter,
+            &index_locations,
+            &flat_index,
+            &index,
+            &in_flight,
+            setup_py,
+            config_settings,
+            build_isolation,
+            link_mode,
+            &no_build,
+            &no_binary,
+            concurrency,
+        )
+        .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build())
+    };
+
+    // Sync the environment.
+    operations::install(
+        &resolution,
+        &editables,
+        site_packages,
+        Modifications::Exact,
+        reinstall,
+        &no_binary,
+        link_mode,
+        compile,
+        &index_locations,
+        &hasher,
+        &tags,
+        &client,
+        &in_flight,
+        concurrency,
+        &install_dispatch,
+        &cache,
+        &venv,
+        dry_run,
+        printer,
+    )
+    .await?;
+
+    // Validate the environment.

-    // Remove any unnecessary packages.
-    if !extraneous.is_empty() || !reinstalls.is_empty() {
-        let start = std::time::Instant::now();
-        for dist_info in extraneous.iter().chain(reinstalls.iter()) {
match uv_installer::uninstall(dist_info).await {
Ok(summary) => {
debug!(
"Uninstalled {} ({} file{}, {} director{})",
dist_info.name(),
summary.file_count,
if summary.file_count == 1 { "" } else { "s" },
summary.dir_count,
if summary.dir_count == 1 { "y" } else { "ies" },
);
}
Err(uv_installer::UninstallError::Uninstall(
install_wheel_rs::Error::MissingRecord(_),
)) => {
warn_user!(
"Failed to uninstall package at {} due to missing RECORD file. Installation may result in an incomplete environment.",
dist_info.path().user_display().cyan(),
);
}
Err(err) => return Err(err.into()),
}
}
let s = if extraneous.len() + reinstalls.len() == 1 {
""
} else {
"s"
};
writeln!(
printer.stderr(),
"{}",
format!(
"Uninstalled {} in {}",
format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
// Install the resolved distributions.
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
if !wheels.is_empty() {
let start = std::time::Instant::now();
uv_installer::Installer::new(&venv)
.with_link_mode(link_mode)
.with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64))
.install(&wheels)?;
let s = if wheels.len() == 1 { "" } else { "s" };
writeln!(
printer.stderr(),
"{}",
format!(
"Installed {} in {}",
format!("{} package{}", wheels.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()
)?;
}
if compile {
compile_bytecode(&venv, &cache, printer).await?;
}
// Report on any changes in the environment.
for event in extraneous
.into_iter()
.chain(reinstalls.into_iter())
.map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Removed,
})
.chain(wheels.into_iter().map(|distribution| ChangeEvent {
dist: LocalDist::from(distribution),
kind: ChangeEventKind::Added,
}))
.sorted_unstable_by(|a, b| {
a.dist
.name()
.cmp(b.dist.name())
.then_with(|| a.kind.cmp(&b.kind))
.then_with(|| a.dist.installed_version().cmp(&b.dist.installed_version()))
})
{
match event.kind {
ChangeEventKind::Added => {
writeln!(
printer.stderr(),
" {} {}{}",
"+".green(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
ChangeEventKind::Removed => {
writeln!(
printer.stderr(),
" {} {}{}",
"-".red(),
event.dist.name().as_ref().bold(),
event.dist.installed_version().to_string().dimmed()
)?;
}
}
}
// Validate that the environment is consistent.
if strict { if strict {
let site_packages = SitePackages::from_executable(&venv)?; operations::validate(&resolution, &venv, printer)?;
for diagnostic in site_packages.diagnostics()? {
writeln!(
printer.stderr(),
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
diagnostic.message().bold()
)?;
}
}
// TODO(konstin): Also check the cache whether any cached or installed dist is already known to
// have been yanked, we currently don't show this message on the second run anymore
for dist in &remote {
let Some(file) = dist.file() else {
continue;
};
match &file.yanked {
None | Some(Yanked::Bool(false)) => {}
Some(Yanked::Bool(true)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked. Refresh your lockfile to pin an un-yanked version.",
"warning".yellow().bold(),
":".bold(),
)?;
}
Some(Yanked::Reason(reason)) => {
writeln!(
printer.stderr(),
"{}{} {dist} is yanked (reason: \"{reason}\"). Refresh your lockfile to pin an un-yanked version.",
"warning".yellow().bold(),
":".bold(),
)?;
}
}
} }
Ok(ExitStatus::Success) Ok(ExitStatus::Success)
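For contrast, `pip install` presumably drives the same two entry points, differing mainly in the modification policy and in honoring `--upgrade`. A minimal sketch under that assumption follows; the argument lists are abbreviated, and the `Modifications::Sufficient` name is an assumption (only `Exact` appears in this diff):

```rust
// Minimal sketch, not the exact `pip install` body: resolve, then install, via the shared
// `operations` module. Elided arguments mirror the `pip sync` call sites shown above.
let resolution = Resolution::from(
    operations::resolve(
        requirements,
        constraints,
        overrides,
        // ...same hashes, site-packages, interpreter, index, and dispatch inputs as above...
    )
    .await?,
);

operations::install(
    &resolution,
    &editables,
    site_packages,
    // `pip install` only needs the environment to satisfy the resolution, whereas `pip sync`
    // (above) passes `Modifications::Exact` so that extraneous packages are removed.
    Modifications::Sufficient,
    // ...remaining arguments as above...
)
.await?;
```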

View file

@ -284,6 +284,7 @@ async fn run() -> Result<ExitStatus> {
args.shared.python_version, args.shared.python_version,
args.shared.python_platform, args.shared.python_platform,
args.shared.strict, args.shared.strict,
args.shared.exclude_newer,
args.shared.python, args.shared.python,
args.shared.system, args.shared.system,
args.shared.break_system_packages, args.shared.break_system_packages,
@ -292,6 +293,7 @@ async fn run() -> Result<ExitStatus> {
globals.native_tls, globals.native_tls,
globals.preview, globals.preview,
cache, cache,
args.dry_run,
printer, printer,
) )
.await .await
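The two extra values threaded through this call site are `args.shared.exclude_newer` and `args.dry_run`. A hypothetical sketch of the receiving side is below, purely for orientation; everything other than the two new parameters is an assumption, since the real parameter list is not shown in this excerpt:

```rust
// Hypothetical signature sketch (not taken from this diff): the sync command now accepts
// the same `exclude_newer` cutoff and `dry_run` switch that `pip install` already used.
pub(crate) async fn pip_sync(
    sources: &[RequirementsSource],      // assumed existing parameter
    reinstall: &Reinstall,               // assumed existing parameter
    exclude_newer: Option<ExcludeNewer>, // new: ignore distributions published after this cutoff
    dry_run: bool,                       // new: resolve and report, but leave the environment untouched
    printer: Printer,                    // assumed existing parameter
) -> anyhow::Result<ExitStatus> {
    // ...
    todo!()
}
```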

View file

@ -332,6 +332,7 @@ pub(crate) struct PipSyncSettings {
pub(crate) src_file: Vec<PathBuf>, pub(crate) src_file: Vec<PathBuf>,
pub(crate) reinstall: Reinstall, pub(crate) reinstall: Reinstall,
pub(crate) refresh: Refresh, pub(crate) refresh: Refresh,
pub(crate) dry_run: bool,
// Shared settings. // Shared settings.
pub(crate) shared: PipSharedSettings, pub(crate) shared: PipSharedSettings,
@ -378,6 +379,8 @@ impl PipSyncSettings {
python_platform, python_platform,
strict, strict,
no_strict, no_strict,
exclude_newer,
dry_run,
compat_args: _, compat_args: _,
} = args; } = args;
@ -386,6 +389,7 @@ impl PipSyncSettings {
src_file, src_file,
reinstall: Reinstall::from_args(flag(reinstall, no_reinstall), reinstall_package), reinstall: Reinstall::from_args(flag(reinstall, no_reinstall), reinstall_package),
refresh: Refresh::from_args(flag(refresh, no_refresh), refresh_package), refresh: Refresh::from_args(flag(refresh, no_refresh), refresh_package),
dry_run,
// Shared settings. // Shared settings.
shared: PipSharedSettings::combine( shared: PipSharedSettings::combine(
@ -417,6 +421,7 @@ impl PipSyncSettings {
}), }),
python_version, python_version,
python_platform, python_platform,
exclude_newer,
link_mode, link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode), compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
require_hashes: flag(require_hashes, no_require_hashes), require_hashes: flag(require_hashes, no_require_hashes),
@ -446,6 +451,7 @@ pub(crate) struct PipInstallSettings {
pub(crate) refresh: Refresh, pub(crate) refresh: Refresh,
pub(crate) dry_run: bool, pub(crate) dry_run: bool,
pub(crate) uv_lock: Option<String>, pub(crate) uv_lock: Option<String>,
// Shared settings. // Shared settings.
pub(crate) shared: PipSharedSettings, pub(crate) shared: PipSharedSettings,
} }

View file

@ -132,6 +132,7 @@ fn check_incompatible_packages() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- idna==3.6 - idna==3.6
+ idna==2.4 + idna==2.4
@ -198,6 +199,7 @@ fn check_multiple_incompatible_packages() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 2 packages in [TIME] Resolved 2 packages in [TIME]
Downloaded 2 packages in [TIME] Downloaded 2 packages in [TIME]
Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
- idna==3.6 - idna==3.6
+ idna==2.4 + idna==2.4

View file

@ -602,6 +602,7 @@ fn respect_installed_and_reinstall() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 7 packages in [TIME] Resolved 7 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- flask==2.3.2 - flask==2.3.2
+ flask==2.3.3 + flask==2.3.3
@ -625,6 +626,7 @@ fn respect_installed_and_reinstall() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 7 packages in [TIME] Resolved 7 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- flask==2.3.3 - flask==2.3.3
+ flask==3.0.2 + flask==3.0.2
@ -647,6 +649,7 @@ fn respect_installed_and_reinstall() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 7 packages in [TIME] Resolved 7 packages in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- flask==3.0.2 - flask==3.0.2
+ flask==3.0.2 + flask==3.0.2
@ -760,6 +763,7 @@ fn reinstall_incomplete() -> Result<()> {
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
warning: Failed to uninstall package at [SITE_PACKAGES]/anyio-3.7.0.dist-info due to missing RECORD file. Installation may result in an incomplete environment. warning: Failed to uninstall package at [SITE_PACKAGES]/anyio-3.7.0.dist-info due to missing RECORD file. Installation may result in an incomplete environment.
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==3.7.0 - anyio==3.7.0
+ anyio==4.0.0 + anyio==4.0.0
@ -817,6 +821,7 @@ fn allow_incompatibilities() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 2 packages in [TIME] Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- jinja2==3.1.3 - jinja2==3.1.3
+ jinja2==2.11.3 + jinja2==2.11.3
@ -925,6 +930,7 @@ fn install_editable_and_registry() {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- black==24.3.0 - black==24.3.0
+ black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable) + black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable)
@ -964,6 +970,7 @@ fn install_editable_and_registry() {
----- stderr ----- ----- stderr -----
Resolved 6 packages in [TIME] Resolved 6 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable) - black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable)
+ black==23.10.0 + black==23.10.0
@ -1663,6 +1670,7 @@ fn reinstall_no_binary() {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.3.0 - anyio==4.3.0
+ anyio==4.3.0 + anyio==4.3.0
@ -1993,6 +2001,7 @@ fn install_upgrade() {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==3.6.2 - anyio==3.6.2
+ anyio==4.3.0 + anyio==4.3.0
@ -2040,6 +2049,7 @@ fn install_upgrade() {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- httpcore==0.16.3 - httpcore==0.16.3
+ httpcore==1.0.4 + httpcore==1.0.4
@ -2492,6 +2502,7 @@ fn reinstall_duplicate() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 2 packages in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- pip==21.3.1 - pip==21.3.1
- pip==22.1.1 - pip==22.1.1
@ -2612,6 +2623,7 @@ requires-python = ">=3.8"
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 4 packages in [TIME] Resolved 4 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
- anyio==4.0.0 - anyio==4.0.0
+ anyio==3.7.1 + anyio==3.7.1
@ -2677,6 +2689,7 @@ dependencies = {file = ["requirements.txt"]}
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 4 packages in [TIME] Resolved 4 packages in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- example==0.1.0 (from file://[TEMP_DIR]/editable) - example==0.1.0 (from file://[TEMP_DIR]/editable)
+ example==0.1.0 (from file://[TEMP_DIR]/editable) + example==0.1.0 (from file://[TEMP_DIR]/editable)
@ -2698,6 +2711,7 @@ dependencies = {file = ["requirements.txt"]}
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 4 packages in [TIME] Resolved 4 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
- anyio==4.0.0 - anyio==4.0.0
+ anyio==3.7.1 + anyio==3.7.1
@ -2782,6 +2796,7 @@ requires-python = ">=3.8"
----- stderr ----- ----- stderr -----
Resolved 4 packages in [TIME] Resolved 4 packages in [TIME]
Downloaded 2 packages in [TIME] Downloaded 2 packages in [TIME]
Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
- anyio==4.0.0 - anyio==4.0.0
+ anyio==3.7.1 + anyio==3.7.1
@ -3957,6 +3972,7 @@ fn already_installed_dependent_editable() {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable) - first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable)
+ first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable) + first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable)
@ -4054,6 +4070,7 @@ fn already_installed_local_path_dependent() {
----- stderr ----- ----- stderr -----
Resolved 2 packages in [TIME] Resolved 2 packages in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local) - first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local)
+ first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local) + first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local)
@ -4181,6 +4198,7 @@ fn already_installed_local_version_of_remote_package() {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local) - anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local) + anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
@ -4199,6 +4217,7 @@ fn already_installed_local_version_of_remote_package() {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 3 packages in [TIME] Downloaded 3 packages in [TIME]
Uninstalled 1 package in [TIME]
Installed 3 packages in [TIME] Installed 3 packages in [TIME]
- anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local) - anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
+ anyio==4.3.0 + anyio==4.3.0
@ -4216,6 +4235,7 @@ fn already_installed_local_version_of_remote_package() {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.3.0 - anyio==4.3.0
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local) + anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
@ -4300,6 +4320,7 @@ fn already_installed_multiple_versions() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Uninstalled 2 packages in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==3.7.0 - anyio==3.7.0
- anyio==4.0.0 - anyio==4.0.0
@ -4320,6 +4341,7 @@ fn already_installed_multiple_versions() -> Result<()> {
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME] Resolved 3 packages in [TIME]
Uninstalled 2 packages in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==3.7.0 - anyio==3.7.0
- anyio==4.0.0 - anyio==4.0.0
@ -4431,6 +4453,7 @@ fn already_installed_remote_url() {
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389) - uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389)
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389) + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389)
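These `pip install` snapshots now include an explicit `Uninstalled 1 package in [TIME]` line before the `Installed` line. That follows from routing installs through the shared routine, which removes an existing distribution and reinstalls it rather than overwriting it in place, and reports each phase separately. A rough sketch of that sequence, assuming it mirrors the removed `pip sync` code shown earlier rather than quoting the shared implementation:

```rust
// Rough sketch (assumed shape): replacing an installed distribution is an explicit
// uninstall followed by an install, and each phase prints its own summary line.
for dist_info in &reinstalls {
    uv_installer::uninstall(dist_info).await?;
}
writeln!(printer.stderr(), "Uninstalled {} in {}", reinstalls.len(), elapsed(start.elapsed()))?;

uv_installer::Installer::new(&venv)
    .with_link_mode(link_mode)
    .install(&wheels)?;
writeln!(printer.stderr(), "Installed {} in {}", wheels.len(), elapsed(start.elapsed()))?;
```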

View file

@ -294,6 +294,7 @@ fn noop() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME] Audited 1 package in [TIME]
"### "###
); );
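With `pip sync` now resolving before it installs, the same settings also make a `--dry-run` mode possible (see the `dry_run` plumbing above). A hypothetical test sketch in this file's own style is below; it is not part of this commit, and the exact stderr wording is an assumption:

```rust
/// Hypothetical sketch (not from this commit): `uv pip sync --dry-run` resolves and reports
/// the plan without modifying the environment. The output lines are assumed, not asserted
/// against a real snapshot.
#[test]
fn sync_dry_run() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig==2.0.0")?;

    uv_snapshot!(command(&context)
        .arg("requirements.in")
        .arg("--dry-run"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    Would download 1 package
    Would install 1 package
     + iniconfig==2.0.0
    "###);

    Ok(())
}
```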
@ -345,6 +346,7 @@ fn link() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ iniconfig==2.0.0 + iniconfig==2.0.0
"### "###
@ -422,7 +424,7 @@ fn install_sequential() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ tomli==2.0.1 + tomli==2.0.1
@ -703,6 +705,7 @@ fn install_url_then_install_url() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME] Audited 1 package in [TIME]
"### "###
); );
@ -738,6 +741,7 @@ fn install_url_then_install_version() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME] Audited 1 package in [TIME]
"### "###
); );
@ -917,7 +921,7 @@ fn warn_on_yanked_version() -> Result<()> {
  Downloaded 1 package in [TIME]
  Installed 1 package in [TIME]
   + colorama==0.4.2
- warning: colorama==0.4.2 is yanked (reason: "Bad build, missing files, will not install"). Refresh your lockfile to pin an un-yanked version.
+ warning: colorama==0.4.2 is yanked (reason: "Bad build, missing files, will not install").
  "###
  );
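The warning asserted here loses its trailing "Refresh your lockfile to pin an un-yanked version." sentence: the shared code path reports yanked files with the shorter form. A sketch of that check, following the `Yanked` match visible in the removed `pip sync` code earlier in this diff; whether the shared path emits it through `warn_user!` is an assumption:

```rust
// Sketch only: the enum shape (`Yanked::Bool` / `Yanked::Reason`) comes from the removed
// code above; the reporting macro used by the shared path is an assumption.
match &file.yanked {
    None | Some(Yanked::Bool(false)) => {}
    Some(Yanked::Bool(true)) => {
        warn_user!("{dist} is yanked.");
    }
    Some(Yanked::Reason(reason)) => {
        warn_user!("{dist} is yanked (reason: \"{reason}\").");
    }
}
```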
@ -971,6 +975,7 @@ fn install_local_wheel() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl)
"### "###
@ -1263,6 +1268,7 @@ fn install_url_source_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ tqdm==4.66.1 (from https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz) + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz)
"### "###
@ -1352,6 +1358,7 @@ fn install_git_source_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
"### "###
@ -1448,6 +1455,7 @@ fn install_registry_source_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ future==0.18.3 + future==0.18.3
"### "###
@ -1558,6 +1566,7 @@ fn install_path_source_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz) + wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz)
"### "###
@ -1661,6 +1670,7 @@ fn install_path_built_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl)
"### "###
@ -1769,6 +1779,7 @@ fn install_url_built_dist_cached() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ tqdm==4.66.1 (from https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl) + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl)
"### "###
@ -1830,12 +1841,12 @@ fn duplicate_package_overlap() -> Result<()> {
.arg("requirements.txt") .arg("requirements.txt")
.arg("--strict"), @r###" .arg("--strict"), @r###"
success: false success: false
exit_code: 2 exit_code: 1
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
error: Failed to determine installation plan × No solution found when resolving dependencies:
Caused by: Detected duplicate package in requirements: markupsafe Because you require markupsafe==2.1.3 and markupsafe==2.1.2, we can conclude that the requirements are unsatisfiable.
"### "###
); );
@ -1905,6 +1916,7 @@ fn reinstall() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 2 packages in [TIME]
Uninstalled 2 packages in [TIME] Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
- markupsafe==2.1.3 - markupsafe==2.1.3
@ -1958,6 +1970,7 @@ fn reinstall_package() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 2 packages in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- tomli==2.0.1 - tomli==2.0.1
@ -2008,6 +2021,7 @@ fn reinstall_git() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) - werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
@ -2121,7 +2135,7 @@ fn refresh_package() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Installed 2 packages in [TIME] Installed 2 packages in [TIME]
+ markupsafe==2.1.3 + markupsafe==2.1.3
@ -2166,7 +2180,7 @@ fn sync_editable() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 2 packages in [TIME] Resolved 3 packages in [TIME]
Downloaded 2 packages in [TIME] Downloaded 2 packages in [TIME]
Installed 3 packages in [TIME] Installed 3 packages in [TIME]
+ boltons==23.1.1 + boltons==23.1.1
@ -2186,6 +2200,7 @@ fn sync_editable() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 3 packages in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- poetry-editable==0.1.0 (from file://[TEMP_DIR]/poetry_editable) - poetry-editable==0.1.0 (from file://[TEMP_DIR]/poetry_editable)
@ -2233,6 +2248,7 @@ fn sync_editable() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 3 packages in [TIME]
Audited 3 packages in [TIME] Audited 3 packages in [TIME]
"### "###
); );
@ -2295,6 +2311,7 @@ fn sync_editable_and_registry() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- black==24.1.0 - black==24.1.0
@ -2317,6 +2334,7 @@ fn sync_editable_and_registry() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME] Audited 1 package in [TIME]
"### "###
); );
@ -2380,6 +2398,7 @@ fn sync_editable_and_local() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ black==0.1.0 (from file://[TEMP_DIR]/black_editable) + black==0.1.0 (from file://[TEMP_DIR]/black_editable)
"### "###
@ -2423,6 +2442,7 @@ fn sync_editable_and_local() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- black==0.1.0 (from file://[TEMP_DIR]/black_editable) - black==0.1.0 (from file://[TEMP_DIR]/black_editable)
@ -2450,8 +2470,9 @@ fn incompatible_wheel() -> Result<()> {
  ----- stdout -----
  ----- stderr -----
- error: Failed to determine installation plan
- Caused by: A path dependency is incompatible with the current platform: foo-1.2.3-not-compatible-wheel.whl
+ error: Failed to read `foo @ file://[TEMP_DIR]/foo-1.2.3-not-compatible-wheel.whl`
+ Caused by: Failed to unzip wheel: foo-1.2.3-not-compatible-wheel.whl
+ Caused by: unable to locate the end of central directory record
  "###
  );
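The old installation plan rejected this wheel purely from its filename tags, before opening the file; the shared path only fails once it actually tries to read the (fake) archive. The sketch below is an illustration of that kind of filename-level compatibility check and is not uv's implementation, which lives in its tags and platform machinery:

```rust
// Illustration only: reject a wheel by its platform tag, parsed from the filename, without
// reading the archive. Names and the supported-tag list here are made up for the example.
fn wheel_platform_tag(filename: &str) -> Option<&str> {
    // `{name}-{version}(-{build})?-{python}-{abi}-{platform}.whl`
    let stem = filename.strip_suffix(".whl")?;
    stem.rsplit('-').next()
}

fn is_compatible(filename: &str, supported: &[&str]) -> bool {
    wheel_platform_tag(filename)
        .map(|tag| tag == "any" || supported.contains(&tag))
        .unwrap_or(false)
}

fn main() {
    // The test's fake wheel advertises a platform tag no environment supports, so a
    // filename-level check can reject it before ever opening the file.
    assert!(!is_compatible(
        "foo-1.2.3-not-compatible-wheel.whl",
        &["manylinux_2_17_x86_64", "linux_x86_64"],
    ));
    assert!(is_compatible("tomli-2.0.1-py3-none-any.whl", &["linux_x86_64"]));
}
```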
@ -2673,6 +2694,7 @@ fn find_links_wheel_cache() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- tqdm==1000.0.0 - tqdm==1000.0.0
@ -2722,6 +2744,7 @@ fn find_links_source_cache() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- tqdm==999.0.0 - tqdm==999.0.0
@ -2782,6 +2805,7 @@ fn offline() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ black==23.10.1 + black==23.10.1
"### "###
@ -2790,9 +2814,9 @@ fn offline() -> Result<()> {
      Ok(())
  }

- /// Sync with a repeated `anyio` requirement. The second requirement should be ignored.
+ /// Sync with a repeated `anyio` requirement.
  #[test]
- fn repeat_requirement() -> Result<()> {
+ fn repeat_requirement_identical() -> Result<()> {
      let context = TestContext::new("3.12");
      let requirements_in = context.temp_dir.child("requirements.in");
      requirements_in.write_str("anyio\nanyio")?;
@ -2813,23 +2837,45 @@ fn repeat_requirement() -> Result<()> {
      Ok(())
  }

- /// Sync with a repeated, but conflicting `anyio` requirement. The second requirement should cause
- /// an error.
+ /// Sync with a repeated `anyio` requirement, with compatible versions.
  #[test]
- fn conflicting_requirement() -> Result<()> {
+ fn repeat_requirement_compatible() -> Result<()> {
      let context = TestContext::new("3.12");
      let requirements_in = context.temp_dir.child("requirements.in");
      requirements_in.write_str("anyio\nanyio==4.0.0")?;

      uv_snapshot!(command(&context)
          .arg("requirements.in"), @r###"
-     success: false
-     exit_code: 2
+     success: true
+     exit_code: 0
      ----- stdout -----
      ----- stderr -----
-     error: Failed to determine installation plan
-     Caused by: Detected duplicate package in requirements: anyio
+     Resolved 1 package in [TIME]
+     Downloaded 1 package in [TIME]
+     Installed 1 package in [TIME]
+      + anyio==4.0.0
+     "###);
+
+     Ok(())
+ }
+
+ /// Sync with a repeated, but conflicting `anyio` requirement.
+ #[test]
+ fn repeat_requirement_incompatible() -> Result<()> {
+     let context = TestContext::new("3.12");
+     let requirements_in = context.temp_dir.child("requirements.in");
+     requirements_in.write_str("anyio<4.0.0\nanyio==4.0.0")?;
+
+     uv_snapshot!(command(&context)
+         .arg("requirements.in"), @r###"
+     success: false
+     exit_code: 1
+     ----- stdout -----
+     ----- stderr -----
+     × No solution found when resolving dependencies:
+     Because you require anyio<4.0.0 and anyio==4.0.0, we can conclude that the requirements are unsatisfiable.
      "###);

      Ok(())
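Because `pip sync` now resolves before it installs, repeated requirements are no longer rejected up front as duplicates; they are combined into one constraint set and handed to the resolver, which succeeds when the constraints are compatible and fails otherwise, exactly as the renamed tests above assert. A toy illustration of that distinction (not uv code):

```rust
// Toy model of why "anyio" + "anyio==4.0.0" resolves while "anyio<4.0.0" + "anyio==4.0.0"
// does not: all constraints for a package must be satisfiable by a single candidate version.
#[derive(Clone, Copy)]
enum Constraint {
    Any,          // `anyio`
    Exactly(u64), // `anyio==4`
    Below(u64),   // `anyio<4`
}

fn satisfies(version: u64, constraint: Constraint) -> bool {
    match constraint {
        Constraint::Any => true,
        Constraint::Exactly(v) => version == v,
        Constraint::Below(v) => version < v,
    }
}

fn solvable(candidates: &[u64], constraints: &[Constraint]) -> bool {
    candidates
        .iter()
        .any(|&v| constraints.iter().all(|&c| satisfies(v, c)))
}

fn main() {
    let releases = [3, 4];
    // `anyio` + `anyio==4.0.0`: compatible, resolves to version 4.
    assert!(solvable(&releases, &[Constraint::Any, Constraint::Exactly(4)]));
    // `anyio<4.0.0` + `anyio==4.0.0`: no version satisfies both, so resolution fails.
    assert!(!solvable(&releases, &[Constraint::Below(4), Constraint::Exactly(4)]));
}
```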
@ -2950,6 +2996,7 @@ requires-python = ">=3.8"
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ example==0.0.0 (from file://[TEMP_DIR]/editable) + example==0.0.0 (from file://[TEMP_DIR]/editable)
"### "###
@ -2963,6 +3010,7 @@ requires-python = ">=3.8"
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME] Audited 1 package in [TIME]
"### "###
); );
@ -2988,6 +3036,7 @@ requires-python = ">=3.8"
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- example==0.0.0 (from file://[TEMP_DIR]/editable) - example==0.0.0 (from file://[TEMP_DIR]/editable)
@ -3564,6 +3613,7 @@ fn require_hashes_source_url() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.0.0 (from https://files.pythonhosted.org/packages/74/17/5075225ee1abbb93cd7fc30a2d343c6a3f5f71cf388f14768a7a38256581/anyio-4.0.0.tar.gz) - anyio==4.0.0 (from https://files.pythonhosted.org/packages/74/17/5075225ee1abbb93cd7fc30a2d343c6a3f5f71cf388f14768a7a38256581/anyio-4.0.0.tar.gz)
@ -3664,6 +3714,7 @@ fn require_hashes_wheel_url() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.0.0 (from https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl) - anyio==4.0.0 (from https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl)
@ -3713,7 +3764,7 @@ fn require_hashes_wheel_url() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME] Resolved 2 packages in [TIME]
Downloaded 1 package in [TIME] Downloaded 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ iniconfig==2.0.0 + iniconfig==2.0.0
@ -3875,6 +3926,7 @@ fn require_hashes_re_download() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.0.0 - anyio==4.0.0
@ -4074,6 +4126,7 @@ fn require_hashes_editable() -> Result<()> {
----- stderr ----- ----- stderr -----
Built 1 editable in [TIME] Built 1 editable in [TIME]
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable) + black==0.1.0 (from file://[WORKSPACE]/scripts/packages/black_editable)
"### "###
@ -4276,6 +4329,7 @@ fn require_hashes_at_least_one() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.0.0 - anyio==4.0.0
@ -4297,6 +4351,7 @@ fn require_hashes_at_least_one() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Uninstalled 1 package in [TIME] Uninstalled 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
- anyio==4.0.0 - anyio==4.0.0
@ -4531,6 +4586,7 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ example-a-961b4c22==1.0.0 + example-a-961b4c22==1.0.0
"### "###
@ -4731,6 +4787,7 @@ fn require_hashes_registry_invalid_hash() -> Result<()> {
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME] Installed 1 package in [TIME]
+ example-a-961b4c22==1.0.0 + example-a-961b4c22==1.0.0
"### "###