mirror of
https://github.com/astral-sh/uv.git
synced 2025-07-07 13:25:00 +00:00
Consider installed packages during resolution (#2596)
Previously, we did not consider installed distributions as candidates while performing resolution. Here, we update the resolver to use installed distributions that satisfy requirements instead of pulling new distributions from the registry.

The implementation details are as follows:

- We now provide `SitePackages` to the `CandidateSelector`.
- If an installed distribution satisfies the requirement, we prefer it over remote distributions.
- We do not want to allow installed distributions in some cases, i.e., upgrade and reinstall.
- We address this by introducing an `Exclusions` type which tracks installed packages to ignore during selection.
- There's a new `ResolvedDist` wrapper with `Installed(InstalledDist)` and `Installable(Dist)` variants.
- This lets us pass already-installed distributions throughout the resolver.

The user-facing behavior is thoroughly covered in the tests, but briefly (a simplified sketch follows the test plan below):

- Installing a package that depends on an already-installed package prefers the local version over the index.
- Installing a package with a name that matches an already-installed URL package does not reinstall from the index.
- Reinstalling (`--reinstall`) a package by name _will_ pull from the index even if an already-installed URL package is present.
- To reinstall the URL package, you must specify the URL in the request.

Closes https://github.com/astral-sh/uv/issues/1661

Addresses:

- https://github.com/astral-sh/uv/issues/1476
- https://github.com/astral-sh/uv/issues/1856
- https://github.com/astral-sh/uv/issues/2093
- https://github.com/astral-sh/uv/issues/2282
- https://github.com/astral-sh/uv/issues/2383
- https://github.com/astral-sh/uv/issues/2560

## Test plan

- [x] Reproduction at `charlesnicholson/uv-pep420-bug` passes
- [x] Unit test for editable package ([#1476](https://github.com/astral-sh/uv/issues/1476))
- [x] Unit test for previously installed package with empty registry
- [x] Unit test for local non-editable package
- [x] Unit test for new version available locally but not in registry ([#2093](https://github.com/astral-sh/uv/issues/2093))
- ~[ ] Unit test for wheel not available in registry but already installed locally ([#2282](https://github.com/astral-sh/uv/issues/2282))~ (seems complicated and not worthwhile)
- [x] Unit test for install from URL dependency then with matching version ([#2383](https://github.com/astral-sh/uv/issues/2383))
- [x] Unit test for install of new package that depends on installed package does not change version ([#2560](https://github.com/astral-sh/uv/issues/2560))
- [x] Unit test that `pip compile` does _not_ consider installed packages
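The behavior above can be summarized with a small, self-contained sketch. This is not uv's actual code: the `select` helper and the string-keyed maps are simplified stand-ins, and the real selector also checks the requirement's version range and many other details.

```rust
// Minimal sketch of the selection preference, with hypothetical types.
// In uv itself this logic lives in `CandidateSelector::select` and uses
// `SitePackages`, `Exclusions`, and `VersionMap`.
use std::collections::{HashMap, HashSet};

#[derive(Debug, Clone, PartialEq)]
enum Candidate {
    Installed(String), // version already present in the environment
    Remote(String),    // version that would be fetched from the registry
}

fn select(
    package: &str,
    installed: &HashMap<String, String>, // package -> installed version
    registry: &HashMap<String, String>,  // package -> best remote version
    exclusions: &HashSet<String>,        // packages under --reinstall/--upgrade
) -> Option<Candidate> {
    // Prefer the installed distribution unless the package is excluded.
    if !exclusions.contains(package) {
        if let Some(version) = installed.get(package) {
            return Some(Candidate::Installed(version.clone()));
        }
    }
    // Otherwise fall back to the registry.
    registry.get(package).map(|v| Candidate::Remote(v.clone()))
}

fn main() {
    let installed = HashMap::from([("anyio".to_string(), "3.7.0".to_string())]);
    let registry = HashMap::from([("anyio".to_string(), "4.3.0".to_string())]);

    // Plain install: the already-installed version is preferred.
    assert_eq!(
        select("anyio", &installed, &registry, &HashSet::new()),
        Some(Candidate::Installed("3.7.0".to_string()))
    );

    // `--reinstall anyio`: the package is excluded, so the registry wins.
    let exclusions = HashSet::from(["anyio".to_string()]);
    assert_eq!(
        select("anyio", &installed, &registry, &exclusions),
        Some(Candidate::Remote("4.3.0".to_string()))
    );
}
```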
Parent: 7b685a8158
Commit: e1878c8359

53 changed files with 1551 additions and 301 deletions
Cargo.lock (generated, 1 changed line)

@@ -4528,6 +4528,7 @@ dependencies = [
 "uv-client",
 "uv-installer",
 "uv-interpreter",
 "uv-requirements",
 "uv-resolver",
 "uv-types",
]
@@ -10,7 +10,7 @@ use pep440_rs::Version;
use uv_fs::Simplified;
use uv_normalize::PackageName;

use crate::{InstalledMetadata, InstalledVersion, Name};
use crate::{DistributionMetadata, InstalledMetadata, InstalledVersion, Name, VersionOrUrl};

/// A built distribution (wheel) that is installed in a virtual environment.
#[derive(Debug, Clone)]

@@ -114,6 +114,7 @@ impl InstalledDist {
    pub fn metadata(&self) -> Result<pypi_types::Metadata23> {
        let path = self.path().join("METADATA");
        let contents = fs::read(&path)?;
        // TODO(zanieb): Update this to use thiserror so we can unpack parse errors downstream
        pypi_types::Metadata23::parse_metadata(&contents)
            .with_context(|| format!("Failed to parse METADATA file at: {}", path.user_display()))
    }

@@ -145,6 +146,12 @@ impl InstalledDist {
    }
}

impl DistributionMetadata for InstalledDist {
    fn version_or_url(&self) -> VersionOrUrl {
        VersionOrUrl::Version(self.version())
    }
}

impl Name for InstalledRegistryDist {
    fn name(&self) -> &PackageName {
        &self.name
@ -56,6 +56,7 @@ pub use crate::index_url::*;
|
|||
pub use crate::installed::*;
|
||||
pub use crate::prioritized_distribution::*;
|
||||
pub use crate::resolution::*;
|
||||
pub use crate::resolved::*;
|
||||
pub use crate::traits::*;
|
||||
|
||||
mod any;
|
||||
|
@ -70,6 +71,7 @@ mod index_url;
|
|||
mod installed;
|
||||
mod prioritized_distribution;
|
||||
mod resolution;
|
||||
mod resolved;
|
||||
mod traits;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -361,6 +363,14 @@ impl Dist {
|
|||
})))
|
||||
}
|
||||
|
||||
/// Return true if the distribution is editable.
|
||||
pub fn is_editable(&self) -> bool {
|
||||
match self {
|
||||
Self::Source(dist) => dist.is_editable(),
|
||||
Self::Built(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`File`] instance, if this dist is from a registry with simple json api support
|
||||
pub fn file(&self) -> Option<&File> {
|
||||
match self {
|
||||
|
@ -426,6 +436,14 @@ impl SourceDist {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return true if the distribution is editable.
|
||||
pub fn is_editable(&self) -> bool {
|
||||
match self {
|
||||
Self::Path(PathSourceDist { editable, .. }) => *editable,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the path to the source distribution, if if it's a local distribution.
|
||||
pub fn as_path(&self) -> Option<&Path> {
|
||||
match self {
|
||||
|
@ -952,6 +970,16 @@ impl Identifier for BuiltDist {
|
|||
}
|
||||
}
|
||||
|
||||
impl Identifier for InstalledDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.path().distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.path().resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for Dist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
|
|
|
@ -4,7 +4,7 @@ use pep440_rs::VersionSpecifiers;
|
|||
use platform_tags::{IncompatibleTag, TagCompatibility, TagPriority};
|
||||
use pypi_types::{Hashes, Yanked};
|
||||
|
||||
use crate::Dist;
|
||||
use crate::{Dist, InstalledDist, ResolvedDistRef};
|
||||
|
||||
/// A collection of distributions that have been filtered by relevance.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
|
@ -24,6 +24,8 @@ struct PrioritizedDistInner {
|
|||
/// A distribution that can be used for both resolution and installation.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CompatibleDist<'a> {
|
||||
/// The distribution is already installed and can be used.
|
||||
InstalledDist(&'a InstalledDist),
|
||||
/// The distribution should be resolved and installed using a source distribution.
|
||||
SourceDist(&'a Dist),
|
||||
/// The distribution should be resolved and installed using a wheel distribution.
|
||||
|
@ -284,27 +286,29 @@ impl PrioritizedDist {
|
|||
}
|
||||
|
||||
impl<'a> CompatibleDist<'a> {
|
||||
/// Return the [`Dist`] to use during resolution.
|
||||
pub fn for_resolution(&self) -> &Dist {
|
||||
/// Return the [`ResolvedDistRef`] to use during resolution.
|
||||
pub fn for_resolution(&self) -> ResolvedDistRef<'a> {
|
||||
match *self {
|
||||
CompatibleDist::SourceDist(sdist) => sdist,
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => wheel,
|
||||
CompatibleDist::InstalledDist(dist) => ResolvedDistRef::Installed(dist),
|
||||
CompatibleDist::SourceDist(sdist) => ResolvedDistRef::Installable(sdist),
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => ResolvedDistRef::Installable(wheel),
|
||||
CompatibleDist::IncompatibleWheel {
|
||||
source_dist: _,
|
||||
wheel,
|
||||
} => wheel,
|
||||
} => ResolvedDistRef::Installable(wheel),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Dist`] to use during installation.
|
||||
pub fn for_installation(&self) -> &Dist {
|
||||
/// Return the [`ResolvedDistRef`] to use during installation.
|
||||
pub fn for_installation(&self) -> ResolvedDistRef<'a> {
|
||||
match *self {
|
||||
CompatibleDist::SourceDist(sdist) => sdist,
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => wheel,
|
||||
CompatibleDist::InstalledDist(dist) => ResolvedDistRef::Installed(dist),
|
||||
CompatibleDist::SourceDist(sdist) => ResolvedDistRef::Installable(sdist),
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => ResolvedDistRef::Installable(wheel),
|
||||
CompatibleDist::IncompatibleWheel {
|
||||
source_dist,
|
||||
wheel: _,
|
||||
} => source_dist,
|
||||
} => ResolvedDistRef::Installable(source_dist),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,35 +3,46 @@ use rustc_hash::FxHashMap;
|
|||
use pep508_rs::Requirement;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{BuiltDist, Dist, PathSourceDist, SourceDist};
|
||||
use crate::{BuiltDist, Dist, InstalledDist, Name, ResolvedDist, SourceDist};
|
||||
|
||||
/// A set of packages pinned at specific versions.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct Resolution(FxHashMap<PackageName, Dist>);
|
||||
pub struct Resolution(FxHashMap<PackageName, ResolvedDist>);
|
||||
|
||||
impl Resolution {
|
||||
/// Create a new resolution from the given pinned packages.
|
||||
pub fn new(packages: FxHashMap<PackageName, Dist>) -> Self {
|
||||
pub fn new(packages: FxHashMap<PackageName, ResolvedDist>) -> Self {
|
||||
Self(packages)
|
||||
}
|
||||
|
||||
/// Return the distribution for the given package name, if it exists.
|
||||
pub fn get(&self, package_name: &PackageName) -> Option<&Dist> {
|
||||
pub fn get(&self, package_name: &PackageName) -> Option<&ResolvedDist> {
|
||||
self.0.get(package_name)
|
||||
}
|
||||
|
||||
/// Return the remote distribution for the given package name, if it exists.
|
||||
pub fn get_remote(&self, package_name: &PackageName) -> Option<&Dist> {
|
||||
match self.0.get(package_name) {
|
||||
Some(dist) => match dist {
|
||||
ResolvedDist::Installable(dist) => Some(dist),
|
||||
ResolvedDist::Installed(_) => None,
|
||||
},
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over the [`PackageName`] entities in this resolution.
|
||||
pub fn packages(&self) -> impl Iterator<Item = &PackageName> {
|
||||
self.0.keys()
|
||||
}
|
||||
|
||||
/// Iterate over the [`Dist`] entities in this resolution.
|
||||
pub fn distributions(&self) -> impl Iterator<Item = &Dist> {
|
||||
/// Iterate over the [`ResolvedDist`] entities in this resolution.
|
||||
pub fn distributions(&self) -> impl Iterator<Item = &ResolvedDist> {
|
||||
self.0.values()
|
||||
}
|
||||
|
||||
/// Iterate over the [`Dist`] entities in this resolution.
|
||||
pub fn into_distributions(self) -> impl Iterator<Item = Dist> {
|
||||
/// Iterate over the [`ResolvedDist`] entities in this resolution.
|
||||
pub fn into_distributions(self) -> impl Iterator<Item = ResolvedDist> {
|
||||
self.0.into_values()
|
||||
}
|
||||
|
||||
|
@ -51,10 +62,9 @@ impl Resolution {
|
|||
let mut requirements = self
|
||||
.0
|
||||
.values()
|
||||
.filter_map(|dist| match dist {
|
||||
Dist::Source(SourceDist::Path(PathSourceDist { editable: true, .. })) => None,
|
||||
dist => Some(Requirement::from(dist.clone())),
|
||||
})
|
||||
// Remove editable requirements
|
||||
.filter(|dist| !dist.is_editable())
|
||||
.map(|dist| Requirement::from(dist.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
requirements.sort_unstable_by(|a, b| a.name.cmp(&b.name));
|
||||
requirements
|
||||
|
@ -74,6 +84,7 @@ impl From<Dist> for Requirement {
|
|||
)),
|
||||
marker: None,
|
||||
},
|
||||
|
||||
Dist::Built(BuiltDist::DirectUrl(wheel)) => Self {
|
||||
name: wheel.filename.name,
|
||||
extras: vec![],
|
||||
|
@ -117,3 +128,27 @@ impl From<Dist> for Requirement {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InstalledDist> for Requirement {
|
||||
fn from(dist: InstalledDist) -> Self {
|
||||
Self {
|
||||
name: dist.name().clone(),
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::VersionSpecifier(
|
||||
pep440_rs::VersionSpecifiers::from(pep440_rs::VersionSpecifier::equals_version(
|
||||
dist.version().clone(),
|
||||
)),
|
||||
)),
|
||||
marker: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ResolvedDist> for Requirement {
|
||||
fn from(dist: ResolvedDist) -> Self {
|
||||
match dist {
|
||||
ResolvedDist::Installable(dist) => dist.into(),
|
||||
ResolvedDist::Installed(dist) => dist.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
crates/distribution-types/src/resolved.rs (new file, 132 lines)

@@ -0,0 +1,132 @@
use std::fmt::Display;

use pep508_rs::PackageName;

use crate::{
    Dist, DistributionId, DistributionMetadata, Identifier, InstalledDist, Name, ResourceId,
    VersionOrUrl,
};

/// A distribution that can be used for resolution and installation.
///
/// Either an already-installed distribution or a distribution that can be installed.
#[derive(Debug, Clone)]
pub enum ResolvedDist {
    Installed(InstalledDist),
    Installable(Dist),
}

/// A variant of [`ResolvedDist`] with borrowed inner distributions.
#[derive(Debug, Clone)]
pub enum ResolvedDistRef<'a> {
    Installed(&'a InstalledDist),
    Installable(&'a Dist),
}

impl ResolvedDist {
    /// Return true if the distribution is editable.
    pub fn is_editable(&self) -> bool {
        match self {
            Self::Installable(dist) => dist.is_editable(),
            Self::Installed(dist) => dist.is_editable(),
        }
    }
}

impl ResolvedDistRef<'_> {
    pub fn to_owned(&self) -> ResolvedDist {
        match self {
            Self::Installable(dist) => ResolvedDist::Installable((*dist).clone()),
            Self::Installed(dist) => ResolvedDist::Installed((*dist).clone()),
        }
    }
}

impl Name for ResolvedDistRef<'_> {
    fn name(&self) -> &PackageName {
        match self {
            Self::Installable(dist) => dist.name(),
            Self::Installed(dist) => dist.name(),
        }
    }
}

impl DistributionMetadata for ResolvedDistRef<'_> {
    fn version_or_url(&self) -> VersionOrUrl {
        match self {
            Self::Installed(installed) => VersionOrUrl::Version(installed.version()),
            Self::Installable(dist) => dist.version_or_url(),
        }
    }
}

impl Identifier for ResolvedDistRef<'_> {
    fn distribution_id(&self) -> DistributionId {
        match self {
            Self::Installed(dist) => dist.distribution_id(),
            Self::Installable(dist) => dist.distribution_id(),
        }
    }

    fn resource_id(&self) -> ResourceId {
        match self {
            Self::Installed(dist) => dist.resource_id(),
            Self::Installable(dist) => dist.resource_id(),
        }
    }
}

impl Name for ResolvedDist {
    fn name(&self) -> &PackageName {
        match self {
            Self::Installable(dist) => dist.name(),
            Self::Installed(dist) => dist.name(),
        }
    }
}

impl DistributionMetadata for ResolvedDist {
    fn version_or_url(&self) -> VersionOrUrl {
        match self {
            Self::Installed(installed) => installed.version_or_url(),
            Self::Installable(dist) => dist.version_or_url(),
        }
    }
}

impl Identifier for ResolvedDist {
    fn distribution_id(&self) -> DistributionId {
        match self {
            Self::Installed(dist) => dist.distribution_id(),
            Self::Installable(dist) => dist.distribution_id(),
        }
    }

    fn resource_id(&self) -> ResourceId {
        match self {
            Self::Installed(dist) => dist.resource_id(),
            Self::Installable(dist) => dist.resource_id(),
        }
    }
}

impl From<Dist> for ResolvedDist {
    fn from(value: Dist) -> Self {
        ResolvedDist::Installable(value)
    }
}

impl From<InstalledDist> for ResolvedDist {
    fn from(value: InstalledDist) -> Self {
        ResolvedDist::Installed(value)
    }
}

impl Display for ResolvedDist {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Installed(dist) => dist.fmt(f),
            Self::Installable(dist) => dist.fmt(f),
        }
    }
}
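A short usage sketch of the new wrapper follows, assuming the `distribution_types` crate is in scope. The `describe` and `lift` helpers are illustrative only; the variants, the `Name` impls, and the `From` conversions they rely on are the ones defined in the file above.

```rust
use distribution_types::{Dist, InstalledDist, Name, ResolvedDist};

/// Dispatch on whether a resolved package is already present or still needs work.
fn describe(dist: &ResolvedDist) -> String {
    match dist {
        ResolvedDist::Installed(d) => format!("{} (already installed)", d.name()),
        ResolvedDist::Installable(d) => format!("{} (still to install)", d.name()),
    }
}

/// Both halves of the enum convert via the `From` impls defined in `resolved.rs`,
/// so call sites can treat installed and installable distributions uniformly.
fn lift(installed: InstalledDist, remote: Dist) -> (ResolvedDist, ResolvedDist) {
    (installed.into(), remote.into())
}
```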
@ -11,9 +11,9 @@ use uv_build::{SourceBuild, SourceBuildContext};
|
|||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_installer::NoBinary;
|
||||
use uv_interpreter::PythonEnvironment;
|
||||
use uv_resolver::InMemoryIndex;
|
||||
use uv_types::NoBinary;
|
||||
use uv_types::{
|
||||
BuildContext, BuildIsolation, BuildKind, ConfigSettings, InFlight, NoBuild, SetupPyStrategy,
|
||||
};
|
||||
|
|
|
@ -14,10 +14,10 @@ use pep508_rs::Requirement;
|
|||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_installer::NoBinary;
|
||||
use uv_installer::SitePackages;
|
||||
use uv_interpreter::PythonEnvironment;
|
||||
use uv_resolver::{InMemoryIndex, Manifest, Options, Resolver};
|
||||
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
|
||||
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBinary, NoBuild, SetupPyStrategy};
|
||||
|
||||
#[derive(ValueEnum, Default, Clone)]
|
||||
pub(crate) enum ResolveCliFormat {
|
||||
|
@ -88,6 +88,8 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
|
|||
&NoBinary::None,
|
||||
);
|
||||
|
||||
let site_packages = SitePackages::from_executable(&venv)?;
|
||||
|
||||
// Copied from `BuildDispatch`
|
||||
let tags = venv.interpreter().tags()?;
|
||||
let resolver = Resolver::new(
|
||||
|
@ -100,6 +102,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
|
|||
&flat_index,
|
||||
&index,
|
||||
&build_dispatch,
|
||||
&site_packages,
|
||||
)?;
|
||||
let resolution_graph = resolver.resolve().await.with_context(|| {
|
||||
format!(
|
||||
|
|
|
@ -16,10 +16,10 @@ use pep508_rs::{Requirement, VersionOrUrl};
|
|||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, OwnedArchive, RegistryClient, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_installer::NoBinary;
|
||||
use uv_interpreter::PythonEnvironment;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_resolver::InMemoryIndex;
|
||||
use uv_types::NoBinary;
|
||||
use uv_types::{BuildContext, BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
|
||||
|
||||
#[derive(Parser)]
|
||||
|
|
|
@@ -21,6 +21,7 @@ uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-installer = { workspace = true }
uv-interpreter = { workspace = true }
uv-requirements = { workspace = true }
uv-resolver = { workspace = true }
uv-types = { workspace = true }
|
@ -17,11 +17,12 @@ use pep508_rs::Requirement;
|
|||
use uv_build::{SourceBuild, SourceBuildContext};
|
||||
use uv_cache::Cache;
|
||||
use uv_client::{FlatIndex, RegistryClient};
|
||||
use uv_installer::{Downloader, Installer, NoBinary, Plan, Planner, Reinstall, SitePackages};
|
||||
use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages};
|
||||
use uv_interpreter::{Interpreter, PythonEnvironment};
|
||||
use uv_resolver::{InMemoryIndex, Manifest, Options, Resolver};
|
||||
use uv_types::{
|
||||
BuildContext, BuildIsolation, BuildKind, ConfigSettings, InFlight, NoBuild, SetupPyStrategy,
|
||||
BuildContext, BuildIsolation, BuildKind, ConfigSettings, EmptyInstalledPackages, InFlight,
|
||||
NoBinary, NoBuild, Reinstall, SetupPyStrategy,
|
||||
};
|
||||
|
||||
/// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`]
|
||||
|
@ -145,6 +146,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
|
|||
self.flat_index,
|
||||
self.index,
|
||||
self,
|
||||
&EmptyInstalledPackages,
|
||||
)?;
|
||||
let graph = resolver.resolve().await.with_context(|| {
|
||||
format!(
|
||||
|
@ -185,8 +187,9 @@ impl<'a> BuildContext for BuildDispatch<'a> {
|
|||
let site_packages = SitePackages::from_executable(venv)?;
|
||||
|
||||
let Plan {
|
||||
local,
|
||||
cached,
|
||||
remote,
|
||||
installed: _,
|
||||
reinstalls,
|
||||
extraneous: _,
|
||||
} = Planner::with_requirements(&resolution.requirements()).build(
|
||||
|
@ -200,7 +203,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
|
|||
)?;
|
||||
|
||||
// Nothing to do.
|
||||
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() {
|
||||
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() {
|
||||
debug!("No build requirements to install for build");
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -210,7 +213,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
|
|||
.iter()
|
||||
.map(|dist| {
|
||||
resolution
|
||||
.get(&dist.name)
|
||||
.get_remote(&dist.name)
|
||||
.cloned()
|
||||
.expect("Resolution should contain all packages")
|
||||
})
|
||||
|
@ -252,7 +255,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
|
|||
}
|
||||
|
||||
// Install the resolved distributions.
|
||||
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
|
||||
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
|
||||
if !wheels.is_empty() {
|
||||
debug!(
|
||||
"Installing build requirement{}: {}",
|
||||
|
|
|
@@ -2,10 +2,9 @@ pub use compile::{compile_tree, CompileError};
pub use downloader::{Downloader, Reporter as DownloadReporter};
pub use editable::{is_dynamic, BuiltEditable, ResolvedEditable};
pub use installer::{Installer, Reporter as InstallReporter};
pub use plan::{Plan, Planner, Reinstall};
pub use plan::{Plan, Planner};
pub use site_packages::{Diagnostic, SitePackages};
pub use uninstall::{uninstall, UninstallError};
pub use uv_types::NoBinary;

mod compile;
mod downloader;
|
@ -16,8 +16,7 @@ use uv_cache::{ArchiveTarget, ArchiveTimestamp, Cache, CacheBucket, WheelCache};
|
|||
use uv_distribution::{BuiltWheelIndex, RegistryWheelIndex};
|
||||
use uv_fs::Simplified;
|
||||
use uv_interpreter::PythonEnvironment;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_types::NoBinary;
|
||||
use uv_types::{NoBinary, Reinstall};
|
||||
|
||||
use crate::{ResolvedEditable, SitePackages};
|
||||
|
||||
|
@ -68,9 +67,10 @@ impl<'a> Planner<'a> {
|
|||
// Index all the already-downloaded wheels in the cache.
|
||||
let mut registry_index = RegistryWheelIndex::new(cache, tags, index_locations);
|
||||
|
||||
let mut local = vec![];
|
||||
let mut cached = vec![];
|
||||
let mut remote = vec![];
|
||||
let mut reinstalls = vec![];
|
||||
let mut installed = vec![];
|
||||
let mut extraneous = vec![];
|
||||
let mut seen = FxHashMap::with_capacity_and_hasher(
|
||||
self.requirements.len(),
|
||||
|
@ -122,7 +122,7 @@ impl<'a> Planner<'a> {
|
|||
let existing = site_packages.remove_packages(built.name());
|
||||
reinstalls.extend(existing);
|
||||
|
||||
local.push(built.wheel.clone());
|
||||
cached.push(built.wheel.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -166,53 +166,20 @@ impl<'a> Planner<'a> {
|
|||
};
|
||||
|
||||
if reinstall {
|
||||
let installed = site_packages.remove_packages(&requirement.name);
|
||||
reinstalls.extend(installed);
|
||||
let installed_dists = site_packages.remove_packages(&requirement.name);
|
||||
reinstalls.extend(installed_dists);
|
||||
} else {
|
||||
let installed = site_packages.remove_packages(&requirement.name);
|
||||
match installed.as_slice() {
|
||||
let installed_dists = site_packages.remove_packages(&requirement.name);
|
||||
match installed_dists.as_slice() {
|
||||
[] => {}
|
||||
[distribution] => {
|
||||
// Filter out already-installed packages.
|
||||
match requirement.version_or_url.as_ref() {
|
||||
// Accept any version of the package.
|
||||
None => continue,
|
||||
|
||||
// If the requirement comes from a registry, check by name.
|
||||
Some(VersionOrUrl::VersionSpecifier(version_specifier)) => {
|
||||
if version_specifier.contains(distribution.version()) {
|
||||
debug!("Requirement already satisfied: {distribution}");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If the requirement comes from a direct URL, check by URL.
|
||||
Some(VersionOrUrl::Url(url)) => {
|
||||
if let InstalledDist::Url(installed) = &distribution {
|
||||
if &installed.url == url.raw() {
|
||||
// If the requirement came from a local path, check freshness.
|
||||
if let Ok(archive) = url.to_file_path() {
|
||||
if ArchiveTimestamp::up_to_date_with(
|
||||
&archive,
|
||||
ArchiveTarget::Install(distribution),
|
||||
)? {
|
||||
debug!("Requirement already satisfied (and up-to-date): {installed}");
|
||||
continue;
|
||||
}
|
||||
debug!("Requirement already satisfied (but not up-to-date): {installed}");
|
||||
} else {
|
||||
// Otherwise, assume the requirement is up-to-date.
|
||||
debug!("Requirement already satisfied (assumed up-to-date): {installed}");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if installed_satisfies_requirement(distribution, requirement)? {
|
||||
installed.push(distribution.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
reinstalls.push(distribution.clone());
|
||||
}
|
||||
_ => reinstalls.extend(installed),
|
||||
_ => reinstalls.extend(installed_dists),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -222,14 +189,14 @@ impl<'a> Planner<'a> {
|
|||
continue;
|
||||
}
|
||||
|
||||
// Identify any locally-available distributions that satisfy the requirement.
|
||||
// Identify any cached distributions that satisfy the requirement.
|
||||
match requirement.version_or_url.as_ref() {
|
||||
None => {
|
||||
if let Some((_version, distribution)) =
|
||||
registry_index.get(&requirement.name).next()
|
||||
{
|
||||
debug!("Requirement already cached: {distribution}");
|
||||
local.push(CachedDist::Registry(distribution.clone()));
|
||||
cached.push(CachedDist::Registry(distribution.clone()));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -246,7 +213,7 @@ impl<'a> Planner<'a> {
|
|||
})
|
||||
{
|
||||
debug!("Requirement already cached: {distribution}");
|
||||
local.push(CachedDist::Registry(distribution.clone()));
|
||||
cached.push(CachedDist::Registry(distribution.clone()));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -291,7 +258,7 @@ impl<'a> Planner<'a> {
|
|||
);
|
||||
|
||||
debug!("URL wheel requirement already cached: {cached_dist}");
|
||||
local.push(CachedDist::Url(cached_dist));
|
||||
cached.push(CachedDist::Url(cached_dist));
|
||||
continue;
|
||||
}
|
||||
Err(err) if err.kind() == io::ErrorKind::NotFound => {
|
||||
|
@ -339,7 +306,7 @@ impl<'a> Planner<'a> {
|
|||
debug!(
|
||||
"URL wheel requirement already cached: {cached_dist}"
|
||||
);
|
||||
local.push(CachedDist::Url(cached_dist));
|
||||
cached.push(CachedDist::Url(cached_dist));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -355,7 +322,7 @@ impl<'a> Planner<'a> {
|
|||
if let Some(wheel) = BuiltWheelIndex::url(&sdist, cache, tags)? {
|
||||
let cached_dist = wheel.into_url_dist(url.clone());
|
||||
debug!("URL source requirement already cached: {cached_dist}");
|
||||
local.push(CachedDist::Url(cached_dist));
|
||||
cached.push(CachedDist::Url(cached_dist));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -365,7 +332,7 @@ impl<'a> Planner<'a> {
|
|||
if let Some(wheel) = BuiltWheelIndex::path(&sdist, cache, tags)? {
|
||||
let cached_dist = wheel.into_url_dist(url.clone());
|
||||
debug!("Path source requirement already cached: {cached_dist}");
|
||||
local.push(CachedDist::Url(cached_dist));
|
||||
cached.push(CachedDist::Url(cached_dist));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -375,7 +342,7 @@ impl<'a> Planner<'a> {
|
|||
if let Some(wheel) = BuiltWheelIndex::git(&sdist, cache, tags) {
|
||||
let cached_dist = wheel.into_url_dist(url.clone());
|
||||
debug!("Git source requirement already cached: {cached_dist}");
|
||||
local.push(CachedDist::Url(cached_dist));
|
||||
cached.push(CachedDist::Url(cached_dist));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -409,7 +376,8 @@ impl<'a> Planner<'a> {
|
|||
}
|
||||
|
||||
Ok(Plan {
|
||||
local,
|
||||
cached,
|
||||
installed,
|
||||
remote,
|
||||
reinstalls,
|
||||
extraneous,
|
||||
|
@ -429,7 +397,11 @@ enum Specifier<'a> {
|
|||
pub struct Plan {
|
||||
/// The distributions that are not already installed in the current environment, but are
|
||||
/// available in the local cache.
|
||||
pub local: Vec<CachedDist>,
|
||||
pub cached: Vec<CachedDist>,
|
||||
|
||||
/// Any distributions that are already installed in the current environment, and can be used
|
||||
/// to satisfy the requirements.
|
||||
pub installed: Vec<InstalledDist>,
|
||||
|
||||
/// The distributions that are not already installed in the current environment, and are
|
||||
/// not available in the local cache.
|
||||
|
@ -444,37 +416,49 @@ pub struct Plan {
|
|||
pub extraneous: Vec<InstalledDist>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Reinstall {
|
||||
/// Don't reinstall any packages; respect the existing installation.
|
||||
None,
|
||||
/// Returns true if a requirement is satisfied by an installed distribution.
|
||||
///
|
||||
/// Returns an error if IO fails during a freshness check for a local path.
|
||||
fn installed_satisfies_requirement(
|
||||
distribution: &InstalledDist,
|
||||
requirement: &Requirement,
|
||||
) -> Result<bool> {
|
||||
// Filter out already-installed packages.
|
||||
match requirement.version_or_url.as_ref() {
|
||||
// Accept any version of the package.
|
||||
None => return Ok(true),
|
||||
|
||||
/// Reinstall all packages in the plan.
|
||||
All,
|
||||
// If the requirement comes from a registry, check by name.
|
||||
Some(VersionOrUrl::VersionSpecifier(version_specifier)) => {
|
||||
if version_specifier.contains(distribution.version()) {
|
||||
debug!("Requirement already satisfied: {distribution}");
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
/// Reinstall only the specified packages.
|
||||
Packages(Vec<PackageName>),
|
||||
}
|
||||
|
||||
impl Reinstall {
|
||||
/// Determine the reinstall strategy to use.
|
||||
pub fn from_args(reinstall: bool, reinstall_package: Vec<PackageName>) -> Self {
|
||||
if reinstall {
|
||||
Self::All
|
||||
} else if !reinstall_package.is_empty() {
|
||||
Self::Packages(reinstall_package)
|
||||
} else {
|
||||
Self::None
|
||||
// If the requirement comes from a direct URL, check by URL.
|
||||
Some(VersionOrUrl::Url(url)) => {
|
||||
if let InstalledDist::Url(installed) = &distribution {
|
||||
if &installed.url == url.raw() {
|
||||
// If the requirement came from a local path, check freshness.
|
||||
if let Ok(archive) = url.to_file_path() {
|
||||
if ArchiveTimestamp::up_to_date_with(
|
||||
&archive,
|
||||
ArchiveTarget::Install(distribution),
|
||||
)? {
|
||||
debug!("Requirement already satisfied (and up-to-date): {installed}");
|
||||
return Ok(true);
|
||||
}
|
||||
debug!("Requirement already satisfied (but not up-to-date): {installed}");
|
||||
} else {
|
||||
// Otherwise, assume the requirement is up-to-date.
|
||||
debug!("Requirement already satisfied (assumed up-to-date): {installed}");
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if no packages should be reinstalled.
|
||||
pub fn is_none(&self) -> bool {
|
||||
matches!(self, Self::None)
|
||||
}
|
||||
|
||||
/// Returns `true` if all packages should be reinstalled.
|
||||
pub fn is_all(&self) -> bool {
|
||||
matches!(self, Self::All)
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
|
|
@@ -14,6 +14,7 @@ use requirements_txt::EditableRequirement;
use uv_cache::{ArchiveTarget, ArchiveTimestamp};
use uv_interpreter::PythonEnvironment;
use uv_normalize::PackageName;
use uv_types::InstalledPackagesProvider;

use crate::is_dynamic;

@@ -577,3 +578,13 @@ impl Diagnostic {
        }
    }
}

impl InstalledPackagesProvider for SitePackages<'_> {
    fn iter(&self) -> impl Iterator<Item = &InstalledDist> {
        self.iter()
    }

    fn get_packages(&self, name: &PackageName) -> Vec<&InstalledDist> {
        self.get_packages(name)
    }
}
@@ -4,13 +4,15 @@ use distribution_types::{CompatibleDist, IncompatibleDist, IncompatibleSource};
use distribution_types::{DistributionMetadata, IncompatibleWheel, Name, PrioritizedDist};
use pep440_rs::Version;
use pep508_rs::MarkerEnvironment;
use tracing::debug;
use uv_normalize::PackageName;
use uv_types::InstalledPackagesProvider;

use crate::preferences::Preferences;
use crate::prerelease_mode::PreReleaseStrategy;
use crate::resolution_mode::ResolutionStrategy;
use crate::version_map::{VersionMap, VersionMapDistHandle};
use crate::{Manifest, Options};
use crate::{Exclusions, Manifest, Options};

#[derive(Debug, Clone)]
pub(crate) struct CandidateSelector {

@@ -61,23 +63,63 @@ enum AllowPreRelease {

impl CandidateSelector {
    /// Select a [`Candidate`] from a set of candidate versions and files.
    pub(crate) fn select<'a>(
    ///
    /// Unless present in the provided [`Exclusions`], local distributions from the
    /// [`InstalledPackagesProvider`] are preferred over remote distributions in
    /// the [`VersionMap`].
    pub(crate) fn select<'a, InstalledPackages: InstalledPackagesProvider>(
        &'a self,
        package_name: &'a PackageName,
        range: &'a Range<Version>,
        version_map: &'a VersionMap,
        preferences: &'a Preferences,
        installed_packages: &'a InstalledPackages,
        exclusions: &'a Exclusions,
    ) -> Option<Candidate<'a>> {
        // If the package has a preference (e.g., an existing version from an existing lockfile),
        // and the preference satisfies the current range, use that.
        if let Some(version) = preferences.version(package_name) {
            if range.contains(version) {
                // Check for a locally installed distribution that matches the preferred version
                if !exclusions.contains(package_name) {
                    for dist in installed_packages.get_packages(package_name) {
                        if dist.version() == version {
                            debug!("Found installed version of {dist} that satisfies preference in {range}");

                            return Some(Candidate {
                                name: package_name,
                                version,
                                dist: CandidateDist::Compatible(CompatibleDist::InstalledDist(
                                    dist,
                                )),
                            });
                        }
                    }
                }

                // Check for a remote distribution that matches the preferred version
                if let Some(file) = version_map.get(version) {
                    return Some(Candidate::new(package_name, version, file));
                }
            }
        }

        // Check for a locally installed distribution that satisfies the range
        if !exclusions.contains(package_name) {
            for dist in installed_packages.get_packages(package_name) {
                let version = dist.version();
                if range.contains(version) {
                    debug!("Found installed version of {dist} that satisfies {range}");

                    return Some(Candidate {
                        name: package_name,
                        version,
                        dist: CandidateDist::Compatible(CompatibleDist::InstalledDist(dist)),
                    });
                }
            }
        }

        // Determine the appropriate prerelease strategy for the current package.
        let allow_prerelease = match &self.prerelease_strategy {
            PreReleaseStrategy::Disallow => AllowPreRelease::No,

@@ -100,7 +142,7 @@ impl CandidateSelector {
        };

        tracing::trace!(
            "selecting candidate for package {:?} with range {:?} with {} versions",
            "selecting candidate for package {:?} with range {:?} with {} remote versions",
            package_name,
            range,
            version_map.len()
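For context, `InstalledPackages` here is anything implementing `uv_types::InstalledPackagesProvider`; `SitePackages` is the real implementation shown earlier, and `EmptyInstalledPackages` covers the "nothing installed" case. A minimal in-memory provider might look like the sketch below, assuming the trait consists of exactly the two methods seen in the `SitePackages` impl.

```rust
use distribution_types::{InstalledDist, Name};
use uv_normalize::PackageName;
use uv_types::InstalledPackagesProvider;

/// A fixed list of installed distributions, e.g. for driving the selector in tests.
struct FixedInstalledPackages(Vec<InstalledDist>);

impl InstalledPackagesProvider for FixedInstalledPackages {
    fn iter(&self) -> impl Iterator<Item = &InstalledDist> {
        self.0.iter()
    }

    fn get_packages(&self, name: &PackageName) -> Vec<&InstalledDist> {
        // Return every installed distribution whose name matches the request.
        self.0.iter().filter(|dist| dist.name() == name).collect()
    }
}
```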
@@ -8,7 +8,9 @@ use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, DerivationTree, Reporter};
use rustc_hash::FxHashMap;

use distribution_types::{BuiltDist, IndexLocations, PathBuiltDist, PathSourceDist, SourceDist};
use distribution_types::{
    BuiltDist, IndexLocations, InstalledDist, PathBuiltDist, PathSourceDist, SourceDist,
};
use once_map::OnceMap;
use pep440_rs::Version;
use pep508_rs::Requirement;

@@ -70,6 +72,10 @@ pub enum ResolveError {
    #[error("Failed to read: {0}")]
    Read(Box<PathBuiltDist>, #[source] uv_distribution::Error),

    // TODO(zanieb): Use `thiserror` in `InstalledDist` so we can avoid chaining `anyhow`
    #[error("Failed to read metadata from installed package: {0}")]
    ReadInstalled(Box<InstalledDist>, #[source] anyhow::Error),

    #[error("Failed to build: {0}")]
    Build(Box<PathSourceDist>, #[source] uv_distribution::Error),
crates/uv-resolver/src/exclusions.rs (new file, 48 lines)

@@ -0,0 +1,48 @@
use pep508_rs::PackageName;
use rustc_hash::FxHashSet;
use uv_types::{Reinstall, Upgrade};

/// Tracks locally installed packages that should not be selected during resolution.
#[derive(Debug, Default, Clone)]
pub enum Exclusions {
    #[default]
    None,
    /// Exclude some local packages from consideration, e.g. from `--reinstall-package foo --upgrade-package bar`
    Some(FxHashSet<PackageName>),
    /// Exclude all local packages from consideration, e.g. from `--reinstall` or `--upgrade`
    All,
}

impl Exclusions {
    pub fn new(reinstall: Reinstall, upgrade: Upgrade) -> Self {
        if upgrade.is_all() || reinstall.is_all() {
            Self::All
        } else {
            let mut exclusions: FxHashSet<PackageName> =
                if let Reinstall::Packages(packages) = reinstall {
                    FxHashSet::from_iter(packages)
                } else {
                    FxHashSet::default()
                };

            if let Upgrade::Packages(packages) = upgrade {
                exclusions.extend(packages);
            };

            if exclusions.is_empty() {
                Self::None
            } else {
                Self::Some(exclusions)
            }
        }
    }

    /// Returns true if the package is excluded and a local distribution should not be used.
    pub fn contains(&self, package: &PackageName) -> bool {
        match self {
            Self::None => false,
            Self::Some(packages) => packages.contains(package),
            Self::All => true,
        }
    }
}
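A sketch of how these exclusions are built from the CLI flags: the `Reinstall` variants and the `Exclusions` API are the ones shown above, while `Upgrade::None` is an assumed variant mirroring `Reinstall::None`.

```rust
use pep508_rs::PackageName;
use uv_resolver::Exclusions;
use uv_types::{Reinstall, Upgrade};

fn demo(name: PackageName) {
    // `--reinstall-package <name>`: exclude just that package from local reuse.
    let some = Exclusions::new(Reinstall::Packages(vec![name.clone()]), Upgrade::None);
    assert!(some.contains(&name));

    // `--reinstall` (or `--upgrade`): exclude every installed package.
    let all = Exclusions::new(Reinstall::All, Upgrade::None);
    assert!(all.contains(&name));
}
```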
@@ -1,5 +1,6 @@
pub use dependency_mode::DependencyMode;
pub use error::ResolveError;
pub use exclusions::Exclusions;
pub use manifest::Manifest;
pub use options::{Options, OptionsBuilder};
pub use preferences::{Preference, PreferenceError};

@@ -22,6 +23,7 @@ mod dependency_mode;
mod dependency_provider;
mod editables;
mod error;
mod exclusions;
mod manifest;
mod options;
mod overrides;
|
|
@ -4,7 +4,7 @@ use pypi_types::Metadata23;
|
|||
use uv_normalize::PackageName;
|
||||
use uv_types::RequestedRequirements;
|
||||
|
||||
use crate::preferences::Preference;
|
||||
use crate::{preferences::Preference, Exclusions};
|
||||
|
||||
/// A manifest of requirements, constraints, and preferences.
|
||||
#[derive(Clone, Debug)]
|
||||
|
@ -34,6 +34,12 @@ pub struct Manifest {
|
|||
/// direct requirements in their own right.
|
||||
pub(crate) editables: Vec<(LocalEditable, Metadata23)>,
|
||||
|
||||
/// The installed packages to exclude from consideration during resolution.
|
||||
///
|
||||
/// These typically represent packages that are being upgraded or reinstalled
|
||||
/// and should be pulled from a remote source like a package index.
|
||||
pub(crate) exclusions: Exclusions,
|
||||
|
||||
/// The lookahead requirements for the project.
|
||||
///
|
||||
/// These represent transitive dependencies that should be incorporated when making
|
||||
|
@ -43,6 +49,7 @@ pub struct Manifest {
|
|||
}
|
||||
|
||||
impl Manifest {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
requirements: Vec<Requirement>,
|
||||
constraints: Vec<Requirement>,
|
||||
|
@ -50,6 +57,7 @@ impl Manifest {
|
|||
preferences: Vec<Preference>,
|
||||
project: Option<PackageName>,
|
||||
editables: Vec<(LocalEditable, Metadata23)>,
|
||||
exclusions: Exclusions,
|
||||
lookaheads: Vec<RequestedRequirements>,
|
||||
) -> Self {
|
||||
Self {
|
||||
|
@ -59,6 +67,7 @@ impl Manifest {
|
|||
preferences,
|
||||
project,
|
||||
editables,
|
||||
exclusions,
|
||||
lookaheads,
|
||||
}
|
||||
}
|
||||
|
@ -71,6 +80,7 @@ impl Manifest {
|
|||
preferences: Vec::new(),
|
||||
project: None,
|
||||
editables: Vec::new(),
|
||||
exclusions: Exclusions::default(),
|
||||
lookaheads: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use rustc_hash::FxHashMap;
|
||||
|
||||
use distribution_types::{CompatibleDist, Dist};
|
||||
use distribution_types::{CompatibleDist, ResolvedDist};
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::candidate_selector::Candidate;
|
||||
|
@ -10,19 +10,23 @@ use crate::candidate_selector::Candidate;
|
|||
/// For example, given `Flask==3.0.0`, the [`FilePins`] would contain a mapping from `Flask` to
|
||||
/// `3.0.0` to the specific wheel or source distribution archive that was pinned for that version.
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct FilePins(FxHashMap<PackageName, FxHashMap<pep440_rs::Version, Dist>>);
|
||||
pub(crate) struct FilePins(FxHashMap<PackageName, FxHashMap<pep440_rs::Version, ResolvedDist>>);
|
||||
|
||||
impl FilePins {
|
||||
/// Pin a candidate package.
|
||||
pub(crate) fn insert(&mut self, candidate: &Candidate, dist: &CompatibleDist) {
|
||||
self.0
|
||||
.entry(candidate.name().clone())
|
||||
.or_default()
|
||||
.insert(candidate.version().clone(), dist.for_installation().clone());
|
||||
self.0.entry(candidate.name().clone()).or_default().insert(
|
||||
candidate.version().clone(),
|
||||
dist.for_installation().to_owned(),
|
||||
);
|
||||
}
|
||||
|
||||
/// Return the pinned file for the given package name and version, if it exists.
|
||||
pub(crate) fn get(&self, name: &PackageName, version: &pep440_rs::Version) -> Option<&Dist> {
|
||||
pub(crate) fn get(
|
||||
&self,
|
||||
name: &PackageName,
|
||||
version: &pep440_rs::Version,
|
||||
) -> Option<&ResolvedDist> {
|
||||
self.0.get(name)?.get(version)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ use pep508_rs::{
|
|||
};
|
||||
use pypi_types::{HashError, Hashes};
|
||||
use requirements_txt::RequirementEntry;
|
||||
use tracing::trace;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
|
@ -68,13 +69,17 @@ impl Preference {
|
|||
pub(crate) struct Preferences(FxHashMap<PackageName, Pin>);
|
||||
|
||||
impl Preferences {
|
||||
/// Create a map of pinned packages from a list of [`Preference`] entries.
|
||||
pub(crate) fn from_requirements(
|
||||
requirements: Vec<Preference>,
|
||||
/// Create a map of pinned packages from an iterator of [`Preference`] entries.
|
||||
/// Takes ownership of the [`Preference`] entries.
|
||||
///
|
||||
/// The provided [`MarkerEnvironment`] will be used to filter the preferences
|
||||
/// to an applicable subset.
|
||||
pub(crate) fn from_iter<PreferenceIterator: IntoIterator<Item = Preference>>(
|
||||
preferences: PreferenceIterator,
|
||||
markers: &MarkerEnvironment,
|
||||
) -> Self {
|
||||
Self(
|
||||
requirements
|
||||
preferences
|
||||
.into_iter()
|
||||
.filter_map(|preference| {
|
||||
let Preference {
|
||||
|
@ -84,26 +89,45 @@ impl Preferences {
|
|||
|
||||
// Search for, e.g., `flask==1.2.3` entries that match the current environment.
|
||||
if !requirement.evaluate_markers(markers, &[]) {
|
||||
trace!(
|
||||
"Excluding {requirement} from preferences due to unmatched markers."
|
||||
);
|
||||
return None;
|
||||
}
|
||||
let Some(VersionOrUrl::VersionSpecifier(version_specifiers)) =
|
||||
requirement.version_or_url.as_ref()
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let [version_specifier] = version_specifiers.as_ref() else {
|
||||
return None;
|
||||
};
|
||||
if *version_specifier.operator() != Operator::Equal {
|
||||
return None;
|
||||
match requirement.version_or_url.as_ref() {
|
||||
Some(VersionOrUrl::VersionSpecifier(version_specifiers)) =>
|
||||
{
|
||||
let [version_specifier] = version_specifiers.as_ref() else {
|
||||
trace!(
|
||||
"Excluding {requirement} from preferences due to multiple version specifiers."
|
||||
);
|
||||
return None;
|
||||
};
|
||||
if *version_specifier.operator() != Operator::Equal {
|
||||
trace!(
|
||||
"Excluding {requirement} from preferences due to inexact version specifier."
|
||||
);
|
||||
return None;
|
||||
}
|
||||
Some((
|
||||
requirement.name,
|
||||
Pin {
|
||||
version: version_specifier.version().clone(),
|
||||
hashes,
|
||||
},
|
||||
))
|
||||
}
|
||||
Some(VersionOrUrl::Url(_)) => {
|
||||
trace!(
|
||||
"Excluding {requirement} from preferences due to URL dependency."
|
||||
);
|
||||
None
|
||||
}
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
Some((
|
||||
requirement.name,
|
||||
Pin {
|
||||
version: version_specifier.version().clone(),
|
||||
hashes,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
|
|
|
@ -15,7 +15,8 @@ use url::Url;
|
|||
|
||||
use crate::dependency_provider::UvDependencyProvider;
|
||||
use distribution_types::{
|
||||
Dist, DistributionMetadata, LocalEditable, Name, PackageId, Verbatim, VersionOrUrl,
|
||||
Dist, DistributionMetadata, LocalEditable, Name, PackageId, ResolvedDist, Verbatim,
|
||||
VersionOrUrl,
|
||||
};
|
||||
use once_map::OnceMap;
|
||||
use pep440_rs::Version;
|
||||
|
@ -48,7 +49,7 @@ pub enum AnnotationStyle {
|
|||
#[derive(Debug)]
|
||||
pub struct ResolutionGraph {
|
||||
/// The underlying graph.
|
||||
petgraph: petgraph::graph::Graph<Dist, Range<Version>, petgraph::Directed>,
|
||||
petgraph: petgraph::graph::Graph<ResolvedDist, Range<Version>, petgraph::Directed>,
|
||||
/// The metadata for every distribution in this resolution.
|
||||
hashes: FxHashMap<PackageName, Vec<Hashes>>,
|
||||
/// The enabled extras for every distribution in this resolution.
|
||||
|
@ -88,7 +89,7 @@ impl ResolutionGraph {
|
|||
PubGrubPackage::Package(package_name, None, None) => {
|
||||
// Create the distribution.
|
||||
let pinned_package = if let Some((editable, _)) = editables.get(package_name) {
|
||||
Dist::from_editable(package_name.clone(), editable.clone())?
|
||||
Dist::from_editable(package_name.clone(), editable.clone())?.into()
|
||||
} else {
|
||||
pins.get(package_name, version)
|
||||
.expect("Every package should be pinned")
|
||||
|
@ -140,7 +141,7 @@ impl ResolutionGraph {
|
|||
}
|
||||
|
||||
// Add the distribution to the graph.
|
||||
let index = petgraph.add_node(pinned_package);
|
||||
let index = petgraph.add_node(pinned_package.into());
|
||||
inverse.insert(package_name, index);
|
||||
}
|
||||
PubGrubPackage::Package(package_name, Some(extra), None) => {
|
||||
|
@ -158,7 +159,7 @@ impl ResolutionGraph {
|
|||
Dist::from_editable(package_name.clone(), editable.clone())?;
|
||||
|
||||
diagnostics.push(Diagnostic::MissingExtra {
|
||||
dist: pinned_package,
|
||||
dist: pinned_package.into(),
|
||||
extra: extra.clone(),
|
||||
});
|
||||
}
|
||||
|
@ -205,7 +206,7 @@ impl ResolutionGraph {
|
|||
Dist::from_editable(package_name.clone(), editable.clone())?;
|
||||
|
||||
diagnostics.push(Diagnostic::MissingExtra {
|
||||
dist: pinned_package,
|
||||
dist: pinned_package.into(),
|
||||
extra: extra.clone(),
|
||||
});
|
||||
}
|
||||
|
@ -230,7 +231,7 @@ impl ResolutionGraph {
|
|||
let pinned_package = Dist::from_url(package_name.clone(), url)?;
|
||||
|
||||
diagnostics.push(Diagnostic::MissingExtra {
|
||||
dist: pinned_package,
|
||||
dist: pinned_package.into(),
|
||||
extra: extra.clone(),
|
||||
});
|
||||
}
|
||||
|
@ -310,8 +311,8 @@ impl ResolutionGraph {
|
|||
.any(|index| self.petgraph[index].name() == name)
|
||||
}
|
||||
|
||||
/// Iterate over the [`Dist`] entities in this resolution.
|
||||
pub fn into_distributions(self) -> impl Iterator<Item = Dist> {
|
||||
/// Iterate over the [`ResolvedDist`] entities in this resolution.
|
||||
pub fn into_distributions(self) -> impl Iterator<Item = ResolvedDist> {
|
||||
self.petgraph
|
||||
.into_nodes_edges()
|
||||
.0
|
||||
|
@ -325,7 +326,9 @@ impl ResolutionGraph {
|
|||
}
|
||||
|
||||
/// Return the underlying graph.
|
||||
pub fn petgraph(&self) -> &petgraph::graph::Graph<Dist, Range<Version>, petgraph::Directed> {
|
||||
pub fn petgraph(
|
||||
&self,
|
||||
) -> &petgraph::graph::Graph<ResolvedDist, Range<Version>, petgraph::Directed> {
|
||||
&self.petgraph
|
||||
}
|
||||
|
||||
|
@ -523,7 +526,7 @@ enum Node<'a> {
|
|||
/// A node linked to an editable distribution.
|
||||
Editable(&'a PackageName, &'a LocalEditable),
|
||||
/// A node linked to a non-editable distribution.
|
||||
Distribution(&'a PackageName, &'a Dist, &'a [ExtraName]),
|
||||
Distribution(&'a PackageName, &'a ResolvedDist, &'a [ExtraName]),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
|
@ -718,7 +721,7 @@ pub enum Diagnostic {
|
|||
MissingExtra {
|
||||
/// The distribution that was requested with an non-existent extra. For example,
|
||||
/// `black==23.10.0`.
|
||||
dist: Dist,
|
||||
dist: ResolvedDist,
|
||||
/// The extra that was requested. For example, `colorama` in `black[colorama]`.
|
||||
extra: ExtraName,
|
||||
},
|
||||
|
|
|
@ -19,7 +19,7 @@ use url::Url;
|
|||
|
||||
use distribution_types::{
|
||||
BuiltDist, Dist, DistributionMetadata, IncompatibleDist, IncompatibleSource, IncompatibleWheel,
|
||||
Name, RemoteSource, SourceDist, VersionOrUrl,
|
||||
InstalledDist, Name, RemoteSource, ResolvedDist, ResolvedDistRef, SourceDist, VersionOrUrl,
|
||||
};
|
||||
pub(crate) use locals::Locals;
|
||||
use pep440_rs::{Version, MIN_VERSION};
|
||||
|
@ -31,7 +31,7 @@ use uv_client::{FlatIndex, RegistryClient};
|
|||
use uv_distribution::DistributionDatabase;
|
||||
use uv_interpreter::Interpreter;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_types::BuildContext;
|
||||
use uv_types::{BuildContext, InstalledPackagesProvider};
|
||||
|
||||
use crate::candidate_selector::{CandidateDist, CandidateSelector};
|
||||
use crate::constraints::Constraints;
|
||||
|
@ -55,7 +55,7 @@ pub use crate::resolver::provider::{
|
|||
use crate::resolver::reporter::Facade;
|
||||
pub use crate::resolver::reporter::{BuildId, Reporter};
|
||||
use crate::yanks::AllowedYanks;
|
||||
use crate::{DependencyMode, Options};
|
||||
use crate::{DependencyMode, Exclusions, Options, VersionMap};
|
||||
|
||||
mod index;
|
||||
mod locals;
|
||||
|
@ -89,12 +89,17 @@ enum ResolverVersion {
|
|||
Unavailable(Version, UnavailableVersion),
|
||||
}
|
||||
|
||||
pub struct Resolver<'a, Provider: ResolverProvider> {
|
||||
pub struct Resolver<
|
||||
'a,
|
||||
Provider: ResolverProvider,
|
||||
InstalledPackages: InstalledPackagesProvider + Send + Sync,
|
||||
> {
|
||||
project: Option<PackageName>,
|
||||
requirements: Vec<Requirement>,
|
||||
constraints: Constraints,
|
||||
overrides: Overrides,
|
||||
preferences: Preferences,
|
||||
exclusions: Exclusions,
|
||||
editables: Editables,
|
||||
urls: Urls,
|
||||
locals: Locals,
|
||||
|
@ -103,6 +108,7 @@ pub struct Resolver<'a, Provider: ResolverProvider> {
|
|||
python_requirement: PythonRequirement,
|
||||
selector: CandidateSelector,
|
||||
index: &'a InMemoryIndex,
|
||||
installed_packages: &'a InstalledPackages,
|
||||
/// Incompatibilities for packages that are entirely unavailable
|
||||
unavailable_packages: DashMap<PackageName, UnavailablePackage>,
|
||||
/// The set of all registry-based packages visited during resolution.
|
||||
|
@ -111,7 +117,12 @@ pub struct Resolver<'a, Provider: ResolverProvider> {
|
|||
provider: Provider,
|
||||
}
|
||||
|
||||
impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvider<'a, Context>> {
|
||||
impl<
|
||||
'a,
|
||||
Context: BuildContext + Send + Sync,
|
||||
InstalledPackages: InstalledPackagesProvider + Send + Sync,
|
||||
> Resolver<'a, DefaultResolverProvider<'a, Context>, InstalledPackages>
|
||||
{
|
||||
/// Initialize a new resolver using the default backend doing real requests.
|
||||
///
|
||||
/// Reads the flat index entries.
|
||||
|
@ -126,6 +137,7 @@ impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvid
|
|||
flat_index: &'a FlatIndex,
|
||||
index: &'a InMemoryIndex,
|
||||
build_context: &'a Context,
|
||||
installed_packages: &'a InstalledPackages,
|
||||
) -> Result<Self, ResolveError> {
|
||||
let provider = DefaultResolverProvider::new(
|
||||
client,
|
||||
|
@ -145,11 +157,17 @@ impl<'a, Context: BuildContext + Send + Sync> Resolver<'a, DefaultResolverProvid
|
|||
PythonRequirement::new(interpreter, markers),
|
||||
index,
|
||||
provider,
|
||||
installed_packages,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
||||
impl<
|
||||
'a,
|
||||
Provider: ResolverProvider,
|
||||
InstalledPackages: InstalledPackagesProvider + Send + Sync,
|
||||
> Resolver<'a, Provider, InstalledPackages>
|
||||
{
|
||||
/// Initialize a new resolver using a user provided backend.
|
||||
pub fn new_custom_io(
|
||||
manifest: Manifest,
|
||||
|
@ -158,6 +176,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
python_requirement: PythonRequirement,
|
||||
index: &'a InMemoryIndex,
|
||||
provider: Provider,
|
||||
installed_packages: &'a InstalledPackages,
|
||||
) -> Result<Self, ResolveError> {
|
||||
Ok(Self {
|
||||
index,
|
||||
|
@ -171,12 +190,14 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
requirements: manifest.requirements,
|
||||
constraints: Constraints::from_requirements(manifest.constraints),
|
||||
overrides: Overrides::from_requirements(manifest.overrides),
|
||||
preferences: Preferences::from_requirements(manifest.preferences, markers),
|
||||
preferences: Preferences::from_iter(manifest.preferences, markers),
|
||||
exclusions: manifest.exclusions,
|
||||
editables: Editables::from_requirements(manifest.editables),
|
||||
markers,
|
||||
python_requirement,
|
||||
reporter: None,
|
||||
provider,
|
||||
installed_packages,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -613,26 +634,23 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
.ok_or(ResolveError::Unregistered)?;
|
||||
self.visited.insert(package_name.clone());
|
||||
|
||||
let empty_version_map = VersionMap::default();
|
||||
let version_map = match *versions_response {
|
||||
VersionsResponse::Found(ref version_map) => version_map,
|
||||
// Short-circuit if we do not find any versions for the package
|
||||
VersionsResponse::NoIndex => {
|
||||
self.unavailable_packages
|
||||
.insert(package_name.clone(), UnavailablePackage::NoIndex);
|
||||
|
||||
return Ok(None);
|
||||
&empty_version_map
|
||||
}
|
||||
VersionsResponse::Offline => {
|
||||
self.unavailable_packages
|
||||
.insert(package_name.clone(), UnavailablePackage::Offline);
|
||||
|
||||
return Ok(None);
|
||||
&empty_version_map
|
||||
}
|
||||
VersionsResponse::NotFound => {
|
||||
self.unavailable_packages
|
||||
.insert(package_name.clone(), UnavailablePackage::NotFound);
|
||||
|
||||
return Ok(None);
|
||||
&empty_version_map
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -645,10 +663,14 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
}
|
||||
|
||||
// Find a version.
|
||||
let Some(candidate) =
|
||||
self.selector
|
||||
.select(package_name, range, version_map, &self.preferences)
|
||||
else {
|
||||
let Some(candidate) = self.selector.select(
|
||||
package_name,
|
||||
range,
|
||||
version_map,
|
||||
&self.preferences,
|
||||
self.installed_packages,
|
||||
&self.exclusions,
|
||||
) else {
|
||||
// Short circuit: we couldn't find _any_ versions for a package.
|
||||
return Ok(None);
|
||||
};
|
||||
|
@ -664,24 +686,26 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
}
|
||||
};
|
||||
|
||||
let filename = match dist.for_installation() {
|
||||
ResolvedDistRef::Installable(dist) => {
|
||||
dist.filename().unwrap_or(Cow::Borrowed("unknown filename"))
|
||||
}
|
||||
ResolvedDistRef::Installed(_) => Cow::Borrowed("installed"),
|
||||
};
|
||||
if let Some(extra) = extra {
|
||||
debug!(
|
||||
"Selecting: {}[{}]=={} ({})",
|
||||
candidate.name(),
|
||||
extra,
|
||||
candidate.version(),
|
||||
dist.for_resolution()
|
||||
.filename()
|
||||
.unwrap_or(Cow::Borrowed("unknown filename"))
|
||||
filename,
|
||||
);
|
||||
} else {
|
||||
debug!(
|
||||
"Selecting: {}=={} ({})",
|
||||
candidate.name(),
|
||||
candidate.version(),
|
||||
dist.for_resolution()
|
||||
.filename()
|
||||
.unwrap_or(Cow::Borrowed("unknown filename"))
|
||||
filename,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -692,11 +716,14 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
let version = candidate.version().clone();
|
||||
|
||||
// Emit a request to fetch the metadata for this version.
|
||||
if self.index.distributions.register(candidate.package_id()) {
|
||||
let dist = dist.for_resolution().clone();
|
||||
request_sink.send(Request::Dist(dist)).await?;
|
||||
}
|
||||
|
||||
if self.index.distributions.register(candidate.package_id()) {
|
||||
let request = match dist.for_resolution() {
|
||||
ResolvedDistRef::Installable(dist) => Request::Dist(dist.clone()),
|
||||
ResolvedDistRef::Installed(dist) => Request::Installed(dist.clone()),
|
||||
};
|
||||
request_sink.send(request).await?;
|
||||
}
|
||||
Ok(Some(ResolverVersion::Available(version)))
|
||||
}
|
||||
}
|
||||
|
@ -827,8 +854,13 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
};
|
||||
let package_id = dist.package_id();
|
||||
|
||||
// If the package does not exist in the registry, we cannot fetch its dependencies
|
||||
if self.unavailable_packages.get(package_name).is_some() {
|
||||
// If the package does not exist in the registry or locally, we cannot fetch its dependencies
|
||||
if self.unavailable_packages.get(package_name).is_some()
|
||||
&& self
|
||||
.installed_packages
|
||||
.get_packages(package_name)
|
||||
.is_empty()
|
||||
{
|
||||
debug_assert!(
|
||||
false,
|
||||
"Dependencies were requested for a package that is not available"
|
||||
|
@ -894,6 +926,10 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
trace!("Received package metadata for: {package_name}");
|
||||
self.index.packages.done(package_name, version_map);
|
||||
}
|
||||
Some(Response::Installed { dist, metadata }) => {
|
||||
trace!("Received installed distribution metadata for: {dist}");
|
||||
self.index.distributions.done(dist.package_id(), metadata);
|
||||
}
|
||||
Some(Response::Dist {
|
||||
dist: Dist::Built(dist),
|
||||
metadata,
|
||||
|
@ -974,6 +1010,13 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
}))
|
||||
}
|
||||
|
||||
Request::Installed(dist) => {
|
||||
let metadata = dist
|
||||
.metadata()
|
||||
.map_err(|err| ResolveError::ReadInstalled(Box::new(dist.clone()), err))?;
|
||||
Ok(Some(Response::Installed { dist, metadata }))
|
||||
}
|
||||
|
||||
// Pre-fetch the package and distribution metadata.
|
||||
Request::Prefetch(package_name, range) => {
|
||||
// Wait for the package metadata to become available.
|
||||
|
@ -1009,10 +1052,14 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
|
||||
// Try to find a compatible version. If there aren't any compatible versions,
|
||||
// short-circuit.
|
||||
let Some(candidate) =
|
||||
self.selector
|
||||
.select(&package_name, &range, version_map, &self.preferences)
|
||||
else {
|
||||
let Some(candidate) = self.selector.select(
|
||||
&package_name,
|
||||
&range,
|
||||
version_map,
|
||||
&self.preferences,
|
||||
self.installed_packages,
|
||||
&self.exclusions,
|
||||
) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
|
@ -1023,33 +1070,44 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
|
|||
|
||||
// Emit a request to fetch the metadata for this version.
|
||||
if self.index.distributions.register(candidate.package_id()) {
|
||||
let dist = dist.for_resolution().clone();
|
||||
let dist = dist.for_resolution().to_owned();
|
||||
|
||||
let (metadata, precise) = self
|
||||
.provider
|
||||
.get_or_build_wheel_metadata(&dist)
|
||||
.boxed()
|
||||
.await
|
||||
.map_err(|err| match dist.clone() {
|
||||
Dist::Built(BuiltDist::Path(built_dist)) => {
|
||||
ResolveError::Read(Box::new(built_dist), err)
|
||||
let response = match dist {
|
||||
ResolvedDist::Installable(dist) => {
|
||||
let (metadata, precise) = self
|
||||
.provider
|
||||
.get_or_build_wheel_metadata(&dist)
|
||||
.boxed()
|
||||
.await
|
||||
.map_err(|err| match dist.clone() {
|
||||
Dist::Built(BuiltDist::Path(built_dist)) => {
|
||||
ResolveError::Read(Box::new(built_dist), err)
|
||||
}
|
||||
Dist::Source(SourceDist::Path(source_dist)) => {
|
||||
ResolveError::Build(Box::new(source_dist), err)
|
||||
}
|
||||
Dist::Built(built_dist) => {
|
||||
ResolveError::Fetch(Box::new(built_dist), err)
|
||||
}
|
||||
Dist::Source(source_dist) => {
|
||||
ResolveError::FetchAndBuild(Box::new(source_dist), err)
|
||||
}
|
||||
})?;
|
||||
Response::Dist {
|
||||
dist,
|
||||
metadata,
|
||||
precise,
|
||||
}
|
||||
Dist::Source(SourceDist::Path(source_dist)) => {
|
||||
ResolveError::Build(Box::new(source_dist), err)
|
||||
}
|
||||
Dist::Built(built_dist) => {
|
||||
ResolveError::Fetch(Box::new(built_dist), err)
|
||||
}
|
||||
Dist::Source(source_dist) => {
|
||||
ResolveError::FetchAndBuild(Box::new(source_dist), err)
|
||||
}
|
||||
})?;
|
||||
}
|
||||
ResolvedDist::Installed(dist) => {
|
||||
let metadata = dist.metadata().map_err(|err| {
|
||||
ResolveError::ReadInstalled(Box::new(dist.clone()), err)
|
||||
})?;
|
||||
Response::Installed { dist, metadata }
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Some(Response::Dist {
|
||||
dist,
|
||||
metadata,
|
||||
precise,
|
||||
}))
|
||||
Ok(Some(response))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
|
@ -1087,6 +1145,8 @@ pub(crate) enum Request {
|
|||
Package(PackageName),
|
||||
/// A request to fetch the metadata for a built or source distribution.
|
||||
Dist(Dist),
|
||||
/// A request to fetch the metadata from an already-installed distribution.
|
||||
Installed(InstalledDist),
|
||||
/// A request to pre-fetch the metadata for a package and the best-guess distribution.
|
||||
Prefetch(PackageName, Range<Version>),
|
||||
}
|
||||
|
@ -1100,6 +1160,9 @@ impl Display for Request {
|
|||
Self::Dist(dist) => {
|
||||
write!(f, "Metadata {dist}")
|
||||
}
|
||||
Self::Installed(dist) => {
|
||||
write!(f, "Installed metadata {dist}")
|
||||
}
|
||||
Self::Prefetch(package_name, range) => {
|
||||
write!(f, "Prefetch {package_name} {range}")
|
||||
}
|
||||
|
@ -1118,6 +1181,11 @@ enum Response {
|
|||
metadata: Metadata23,
|
||||
precise: Option<Url>,
|
||||
},
|
||||
/// The returned metadata for an already-installed distribution.
|
||||
Installed {
|
||||
dist: InstalledDist,
|
||||
metadata: Metadata23,
|
||||
},
|
||||
}
|
||||
|
||||
/// An enum used by [`DependencyProvider`] that holds information about package dependencies.
|
||||
|
|
|
@ -199,6 +199,15 @@ impl VersionMap {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for VersionMap {
|
||||
/// Create an empty version map.
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
inner: VersionMapInner::Eager(BTreeMap::default()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FlatDistributions> for VersionMap {
|
||||
fn from(flat_index: FlatDistributions) -> Self {
|
||||
Self {
|
||||
|
|
|
@ -17,11 +17,12 @@ use uv_cache::Cache;
|
|||
use uv_client::{FlatIndex, RegistryClientBuilder};
|
||||
use uv_interpreter::{find_default_python, Interpreter, PythonEnvironment};
|
||||
use uv_resolver::{
|
||||
DisplayResolutionGraph, InMemoryIndex, Manifest, Options, OptionsBuilder, PreReleaseMode,
|
||||
Preference, ResolutionGraph, ResolutionMode, Resolver,
|
||||
DisplayResolutionGraph, Exclusions, InMemoryIndex, Manifest, Options, OptionsBuilder,
|
||||
PreReleaseMode, Preference, ResolutionGraph, ResolutionMode, Resolver,
|
||||
};
|
||||
use uv_types::{
|
||||
BuildContext, BuildIsolation, BuildKind, NoBinary, NoBuild, SetupPyStrategy, SourceBuildTrait,
|
||||
BuildContext, BuildIsolation, BuildKind, EmptyInstalledPackages, NoBinary, NoBuild,
|
||||
SetupPyStrategy, SourceBuildTrait,
|
||||
};
|
||||
|
||||
// Exclude any packages uploaded after this date.
|
||||
|
@ -124,6 +125,7 @@ async fn resolve(
|
|||
find_default_python(&Cache::temp().unwrap()).expect("Expected a python to be installed");
|
||||
let interpreter = Interpreter::artificial(real_interpreter.platform().clone(), markers.clone());
|
||||
let build_context = DummyContext::new(Cache::temp()?, interpreter.clone());
|
||||
let installed_packages = EmptyInstalledPackages;
|
||||
let resolver = Resolver::new(
|
||||
manifest,
|
||||
options,
|
||||
|
@ -134,6 +136,7 @@ async fn resolve(
|
|||
&flat_index,
|
||||
&index,
|
||||
&build_context,
|
||||
&installed_packages,
|
||||
)?;
|
||||
Ok(resolver.resolve().await?)
|
||||
}
|
||||
|
@ -271,6 +274,7 @@ async fn black_mypy_extensions() -> Result<()> {
|
|||
vec![],
|
||||
None,
|
||||
vec![],
|
||||
Exclusions::default(),
|
||||
vec![],
|
||||
);
|
||||
let options = OptionsBuilder::new()
|
||||
|
@ -307,6 +311,7 @@ async fn black_mypy_extensions_extra() -> Result<()> {
|
|||
vec![],
|
||||
None,
|
||||
vec![],
|
||||
Exclusions::default(),
|
||||
vec![],
|
||||
);
|
||||
let options = OptionsBuilder::new()
|
||||
|
@ -343,6 +348,7 @@ async fn black_flake8() -> Result<()> {
|
|||
vec![],
|
||||
None,
|
||||
vec![],
|
||||
Exclusions::default(),
|
||||
vec![],
|
||||
);
|
||||
let options = OptionsBuilder::new()
|
||||
|
@ -433,8 +439,10 @@ async fn black_respect_preference() -> Result<()> {
|
|||
)?)],
|
||||
None,
|
||||
vec![],
|
||||
Exclusions::default(),
|
||||
vec![],
|
||||
);
|
||||
|
||||
let options = OptionsBuilder::new()
|
||||
.exclude_newer(Some(*EXCLUDE_NEWER))
|
||||
.build();
|
||||
|
@ -469,6 +477,7 @@ async fn black_ignore_preference() -> Result<()> {
|
|||
)?)],
|
||||
None,
|
||||
vec![],
|
||||
Exclusions::default(),
|
||||
vec![],
|
||||
);
|
||||
let options = OptionsBuilder::new()
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use pep508_rs::PackageName;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
/// Whether to reinstall packages.
|
||||
|
@ -38,7 +39,7 @@ impl Reinstall {
|
|||
}
|
||||
|
||||
/// Whether to allow package upgrades.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Upgrade {
|
||||
/// Prefer pinned versions from the existing lockfile, if possible.
|
||||
None,
|
||||
|
|
|
@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
|
|||
|
||||
use anyhow::Result;
|
||||
|
||||
use distribution_types::{IndexLocations, Resolution, SourceDist};
|
||||
use distribution_types::{IndexLocations, InstalledDist, Resolution, SourceDist};
|
||||
|
||||
use pep508_rs::Requirement;
|
||||
use pep508_rs::{PackageName, Requirement};
|
||||
use uv_cache::Cache;
|
||||
use uv_interpreter::{Interpreter, PythonEnvironment};
|
||||
|
||||
|
@ -127,3 +127,22 @@ pub trait SourceBuildTrait {
|
|||
fn wheel<'a>(&'a self, wheel_dir: &'a Path)
|
||||
-> impl Future<Output = Result<String>> + Send + 'a;
|
||||
}
|
||||
|
||||
/// A wrapper for [`uv_installer::SitePackages`]
|
||||
pub trait InstalledPackagesProvider {
|
||||
fn iter(&self) -> impl Iterator<Item = &InstalledDist>;
|
||||
fn get_packages(&self, name: &PackageName) -> Vec<&InstalledDist>;
|
||||
}
|
||||
|
||||
/// An [`InstalledPackagesProvider`] with no packages in it.
|
||||
pub struct EmptyInstalledPackages;
|
||||
|
||||
impl InstalledPackagesProvider for EmptyInstalledPackages {
|
||||
fn get_packages(&self, _name: &pep508_rs::PackageName) -> Vec<&InstalledDist> {
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl Iterator<Item = &InstalledDist> {
|
||||
std::iter::empty()
|
||||
}
|
||||
}
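This trait is the seam that makes the resolver generic over its view of the environment: the `pip install` path hands the resolver its real `SitePackages`, while `pip compile` and `pip sync` pass `EmptyInstalledPackages` (above) to opt out. As a minimal sketch of how another provider could plug in, backed by an in-memory map and using a stand-in `Dist` struct in place of uv's `InstalledDist` (all names below are illustrative only):

// Illustrative only: `Dist` stands in for `distribution_types::InstalledDist`.
use std::collections::HashMap;

#[derive(Debug)]
struct Dist {
    name: String,
    version: String,
}

trait InstalledPackagesProvider {
    fn iter(&self) -> impl Iterator<Item = &Dist>;
    fn get_packages(&self, name: &str) -> Vec<&Dist>;
}

/// A provider backed by a fixed, in-memory set of packages (e.g. for tests).
struct FixedPackages(HashMap<String, Vec<Dist>>);

impl InstalledPackagesProvider for FixedPackages {
    fn iter(&self) -> impl Iterator<Item = &Dist> {
        self.0.values().flatten()
    }

    fn get_packages(&self, name: &str) -> Vec<&Dist> {
        self.0
            .get(name)
            .map(|dists| dists.iter().collect())
            .unwrap_or_default()
    }
}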
|
||||
|
|
|
@ -24,7 +24,7 @@ use uv_client::{
|
|||
};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_fs::Simplified;
|
||||
use uv_installer::{Downloader, NoBinary};
|
||||
use uv_installer::Downloader;
|
||||
use uv_interpreter::{find_best_python, PythonEnvironment, PythonVersion};
|
||||
use uv_normalize::{ExtraName, PackageName};
|
||||
use uv_requirements::{
|
||||
|
@ -32,10 +32,13 @@ use uv_requirements::{
|
|||
RequirementsSource, RequirementsSpecification, SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{
|
||||
AnnotationStyle, DependencyMode, DisplayResolutionGraph, InMemoryIndex, Manifest,
|
||||
AnnotationStyle, DependencyMode, DisplayResolutionGraph, Exclusions, InMemoryIndex, Manifest,
|
||||
OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver,
|
||||
};
|
||||
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy, Upgrade};
|
||||
use uv_types::{
|
||||
BuildIsolation, ConfigSettings, EmptyInstalledPackages, InFlight, NoBinary, NoBuild,
|
||||
SetupPyStrategy, Upgrade,
|
||||
};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, ResolverReporter};
|
||||
|
@ -344,6 +347,8 @@ pub(crate) async fn pip_compile(
|
|||
preferences,
|
||||
project,
|
||||
editable_metadata,
|
||||
// Do not consider any installed packages during compilation
|
||||
Exclusions::All,
|
||||
lookaheads,
|
||||
);
|
||||
|
||||
|
@ -365,6 +370,7 @@ pub(crate) async fn pip_compile(
|
|||
&flat_index,
|
||||
&top_level_index,
|
||||
&build_dispatch,
|
||||
&EmptyInstalledPackages,
|
||||
)?
|
||||
.with_reporter(ResolverReporter::from(printer));
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use std::collections::HashSet;
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
|
||||
|
@ -27,9 +26,7 @@ use uv_client::{
|
|||
};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_fs::Simplified;
|
||||
use uv_installer::{
|
||||
BuiltEditable, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages,
|
||||
};
|
||||
use uv_installer::{BuiltEditable, Downloader, Plan, Planner, ResolvedEditable, SitePackages};
|
||||
use uv_interpreter::{Interpreter, PythonEnvironment};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_requirements::{
|
||||
|
@ -37,10 +34,13 @@ use uv_requirements::{
|
|||
RequirementsSpecification, SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{
|
||||
DependencyMode, InMemoryIndex, Manifest, Options, OptionsBuilder, PreReleaseMode, Preference,
|
||||
ResolutionGraph, ResolutionMode, Resolver,
|
||||
DependencyMode, Exclusions, InMemoryIndex, Manifest, Options, OptionsBuilder, PreReleaseMode,
|
||||
Preference, ResolutionGraph, ResolutionMode, Resolver,
|
||||
};
|
||||
use uv_types::{
|
||||
BuildIsolation, ConfigSettings, InFlight, NoBinary, NoBuild, Reinstall, SetupPyStrategy,
|
||||
Upgrade,
|
||||
};
|
||||
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy, Upgrade};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
|
||||
|
@ -62,7 +62,7 @@ pub(crate) async fn pip_install(
|
|||
upgrade: Upgrade,
|
||||
index_locations: IndexLocations,
|
||||
keyring_provider: KeyringProvider,
|
||||
reinstall: &Reinstall,
|
||||
reinstall: Reinstall,
|
||||
link_mode: LinkMode,
|
||||
compile: bool,
|
||||
setup_py: SetupPyStrategy,
|
||||
|
@ -298,7 +298,7 @@ pub(crate) async fn pip_install(
|
|||
project,
|
||||
&editables,
|
||||
&site_packages,
|
||||
reinstall,
|
||||
&reinstall,
|
||||
&upgrade,
|
||||
&interpreter,
|
||||
tags,
|
||||
|
@ -352,7 +352,7 @@ pub(crate) async fn pip_install(
|
|||
&resolution,
|
||||
editables,
|
||||
site_packages,
|
||||
reinstall,
|
||||
&reinstall,
|
||||
&no_binary,
|
||||
link_mode,
|
||||
compile,
|
||||
|
@ -513,27 +513,15 @@ async fn resolve(
|
|||
) -> Result<ResolutionGraph, Error> {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let preferences = if upgrade.is_all() || reinstall.is_all() {
|
||||
vec![]
|
||||
} else {
|
||||
// Combine upgrade and reinstall lists
|
||||
let mut exclusions: HashSet<&PackageName> = if let Reinstall::Packages(packages) = reinstall
|
||||
{
|
||||
HashSet::from_iter(packages)
|
||||
} else {
|
||||
HashSet::default()
|
||||
};
|
||||
if let Upgrade::Packages(packages) = upgrade {
|
||||
exclusions.extend(packages);
|
||||
};
|
||||
// TODO(zanieb): Consider consuming these instead of cloning
|
||||
let exclusions = Exclusions::new(reinstall.clone(), upgrade.clone());
|
||||
|
||||
// Prefer current site packages, unless in the upgrade or reinstall lists
|
||||
site_packages
|
||||
.requirements()
|
||||
.map(Preference::from_requirement)
|
||||
.filter(|preference| !exclusions.contains(preference.name()))
|
||||
.collect()
|
||||
};
|
||||
// Prefer current site packages; filter out packages that are marked for reinstall or upgrade
|
||||
let preferences = site_packages
|
||||
.requirements()
|
||||
.filter(|requirement| !exclusions.contains(&requirement.name))
|
||||
.map(Preference::from_requirement)
|
||||
.collect();
|
||||
|
||||
// Map the editables to their metadata.
|
||||
let editables = editables
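The `Exclusions` value built above replaces the ad-hoc `HashSet` and is also what the candidate selector consults when deciding whether an installed distribution may satisfy a requirement. Its definition is not shown in this diff; judging only from the call sites that are (`Exclusions::new(reinstall, upgrade)`, `Exclusions::All` in `pip compile`, `Exclusions::default()` in the tests, and `contains`), a plausible shape is roughly the following speculative sketch, which may differ from the real type in naming and detail:

// Speculative sketch inferred from the call sites in this diff.
use std::collections::HashSet;

#[derive(Debug, Default)]
enum Exclusions {
    /// Every installed package may be considered.
    #[default]
    None,
    /// Skip only these packages (e.g. `--reinstall-package` / `--upgrade-package`).
    Some(HashSet<String>),
    /// Skip all installed packages (e.g. `--reinstall`, `--upgrade`, or `pip compile`).
    All,
}

impl Exclusions {
    fn contains(&self, package: &str) -> bool {
        match self {
            Self::None => false,
            Self::Some(names) => names.contains(package),
            Self::All => true,
        }
    }
}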
|
||||
|
@ -560,6 +548,7 @@ async fn resolve(
|
|||
preferences,
|
||||
project,
|
||||
editables,
|
||||
exclusions,
|
||||
lookaheads,
|
||||
);
|
||||
|
||||
|
@ -574,6 +563,7 @@ async fn resolve(
|
|||
flat_index,
|
||||
index,
|
||||
build_dispatch,
|
||||
site_packages,
|
||||
)?
|
||||
.with_reporter(ResolverReporter::from(printer));
|
||||
let resolution = resolver.resolve().await?;
|
||||
|
@ -643,14 +633,15 @@ async fn install(
|
|||
}
|
||||
|
||||
let Plan {
|
||||
local,
|
||||
cached,
|
||||
remote,
|
||||
reinstalls,
|
||||
installed: _,
|
||||
extraneous: _,
|
||||
} = plan;
|
||||
|
||||
// Nothing to do.
|
||||
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() {
|
||||
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() {
|
||||
let s = if resolution.len() == 1 { "" } else { "s" };
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
|
@ -670,7 +661,7 @@ async fn install(
|
|||
.iter()
|
||||
.map(|dist| {
|
||||
resolution
|
||||
.get(&dist.name)
|
||||
.get_remote(&dist.name)
|
||||
.cloned()
|
||||
.expect("Resolution should contain all packages")
|
||||
})
|
||||
|
@ -733,7 +724,7 @@ async fn install(
|
|||
}
|
||||
|
||||
// Install the resolved distributions.
|
||||
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
|
||||
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
|
||||
if !wheels.is_empty() {
|
||||
let start = std::time::Instant::now();
|
||||
uv_installer::Installer::new(venv)
|
||||
|
@ -806,14 +797,15 @@ async fn install(
|
|||
printer: Printer,
|
||||
) -> Result<(), Error> {
|
||||
let Plan {
|
||||
local,
|
||||
cached,
|
||||
remote,
|
||||
reinstalls,
|
||||
installed: _,
|
||||
extraneous: _,
|
||||
} = plan;
|
||||
|
||||
// Nothing to do.
|
||||
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() {
|
||||
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() {
|
||||
let s = if resolution.len() == 1 { "" } else { "s" };
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
|
@ -834,7 +826,7 @@ async fn install(
|
|||
.iter()
|
||||
.map(|dist| {
|
||||
resolution
|
||||
.get(&dist.name)
|
||||
.get_remote(&dist.name)
|
||||
.cloned()
|
||||
.expect("Resolution should contain all packages")
|
||||
})
|
||||
|
@ -872,7 +864,7 @@ async fn install(
|
|||
}
|
||||
|
||||
// Install the resolved distributions.
|
||||
let installs = wheels.len() + local.len();
|
||||
let installs = wheels.len() + cached.len();
|
||||
|
||||
if installs > 0 {
|
||||
let s = if installs == 1 { "" } else { "s" };
|
||||
|
@ -895,7 +887,7 @@ async fn install(
|
|||
version: distribution.version_or_url().to_string(),
|
||||
kind: ChangeEventKind::Added,
|
||||
}))
|
||||
.chain(local.into_iter().map(|distribution| DryRunEvent {
|
||||
.chain(cached.into_iter().map(|distribution| DryRunEvent {
|
||||
name: distribution.name().clone(),
|
||||
version: distribution.installed_version().to_string(),
|
||||
kind: ChangeEventKind::Added,
|
||||
|
|
|
@ -5,7 +5,9 @@ use itertools::Itertools;
|
|||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
|
||||
use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name};
|
||||
use distribution_types::{
|
||||
IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name, ResolvedDist,
|
||||
};
|
||||
use install_wheel_rs::linker::LinkMode;
|
||||
use platform_tags::Tags;
|
||||
use pypi_types::Yanked;
|
||||
|
@ -18,16 +20,17 @@ use uv_client::{
|
|||
};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_fs::Simplified;
|
||||
use uv_installer::{
|
||||
is_dynamic, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages,
|
||||
};
|
||||
use uv_installer::{is_dynamic, Downloader, Plan, Planner, ResolvedEditable, SitePackages};
|
||||
use uv_interpreter::{Interpreter, PythonEnvironment};
|
||||
use uv_requirements::{
|
||||
ExtrasSpecification, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
|
||||
SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{DependencyMode, InMemoryIndex, Manifest, OptionsBuilder, Resolver};
|
||||
use uv_types::{BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
|
||||
use uv_types::{
|
||||
BuildIsolation, ConfigSettings, EmptyInstalledPackages, InFlight, NoBinary, NoBuild, Reinstall,
|
||||
SetupPyStrategy,
|
||||
};
|
||||
use uv_warnings::warn_user;
|
||||
|
||||
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
|
||||
|
@ -169,6 +172,9 @@ pub(crate) async fn pip_sync(
|
|||
let no_binary = no_binary.combine(specified_no_binary);
|
||||
let no_build = no_build.combine(specified_no_build);
|
||||
|
||||
// Determine the set of installed packages.
|
||||
let site_packages = SitePackages::from_executable(&venv)?;
|
||||
|
||||
// Prep the build context.
|
||||
let build_dispatch = BuildDispatch::new(
|
||||
&client,
|
||||
|
@ -206,9 +212,6 @@ pub(crate) async fn pip_sync(
|
|||
requirements
|
||||
};
|
||||
|
||||
// Determine the set of installed packages.
|
||||
let site_packages = SitePackages::from_executable(&venv)?;
|
||||
|
||||
// Resolve any editables.
|
||||
let resolved_editables = resolve_editables(
|
||||
editables,
|
||||
|
@ -223,12 +226,13 @@ pub(crate) async fn pip_sync(
|
|||
)
|
||||
.await?;
|
||||
|
||||
// Partition into those that should be linked from the cache (`local`), those that need to be
|
||||
// Partition into those that should be linked from the cache (`cached`), those that need to be
|
||||
// downloaded (`remote`), and those that should be removed (`extraneous`).
|
||||
let Plan {
|
||||
local,
|
||||
cached,
|
||||
remote,
|
||||
reinstalls,
|
||||
installed: _,
|
||||
extraneous,
|
||||
} = Planner::with_requirements(&requirements)
|
||||
.with_editable_requirements(&resolved_editables.editables)
|
||||
|
@ -244,7 +248,7 @@ pub(crate) async fn pip_sync(
|
|||
.context("Failed to determine installation plan")?;
|
||||
|
||||
// Nothing to do.
|
||||
if remote.is_empty() && local.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
|
||||
if remote.is_empty() && cached.is_empty() && reinstalls.is_empty() && extraneous.is_empty() {
|
||||
let s = if num_requirements == 1 { "" } else { "s" };
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
|
@ -290,6 +294,8 @@ pub(crate) async fn pip_sync(
|
|||
&flat_index,
|
||||
&index,
|
||||
&build_dispatch,
|
||||
// TODO(zanieb): We should consider support for installed packages in pip sync
|
||||
&EmptyInstalledPackages,
|
||||
)?
|
||||
.with_reporter(reporter);
|
||||
let resolution = resolver.resolve().await?;
|
||||
|
@ -306,7 +312,13 @@ pub(crate) async fn pip_sync(
|
|||
.dimmed()
|
||||
)?;
|
||||
|
||||
resolution.into_distributions().collect::<Vec<_>>()
|
||||
resolution
|
||||
.into_distributions()
|
||||
.filter_map(|dist| match dist {
|
||||
ResolvedDist::Installable(dist) => Some(dist),
|
||||
ResolvedDist::Installed(_) => None,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
// Download, build, and unzip any missing distributions.
|
||||
|
@ -384,7 +396,7 @@ pub(crate) async fn pip_sync(
|
|||
}
|
||||
|
||||
// Install the resolved distributions.
|
||||
let wheels = wheels.into_iter().chain(local).collect::<Vec<_>>();
|
||||
let wheels = wheels.into_iter().chain(cached).collect::<Vec<_>>();
|
||||
if !wheels.is_empty() {
|
||||
let start = std::time::Instant::now();
|
||||
uv_installer::Installer::new(&venv)
|
||||
|
|
|
@ -11,17 +11,18 @@ use miette::{Diagnostic, IntoDiagnostic};
|
|||
use owo_colors::OwoColorize;
|
||||
use thiserror::Error;
|
||||
|
||||
use distribution_types::{DistributionMetadata, IndexLocations, Name};
|
||||
use distribution_types::{DistributionMetadata, IndexLocations, Name, ResolvedDist};
|
||||
use pep508_rs::Requirement;
|
||||
use uv_auth::{KeyringProvider, GLOBAL_AUTH_STORE};
|
||||
use uv_cache::Cache;
|
||||
use uv_client::{Connectivity, FlatIndex, FlatIndexClient, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
use uv_fs::Simplified;
|
||||
use uv_installer::NoBinary;
|
||||
use uv_interpreter::{find_default_python, find_requested_python, Error};
|
||||
use uv_resolver::{InMemoryIndex, OptionsBuilder};
|
||||
use uv_types::{BuildContext, BuildIsolation, ConfigSettings, InFlight, NoBuild, SetupPyStrategy};
|
||||
use uv_types::{
|
||||
BuildContext, BuildIsolation, ConfigSettings, InFlight, NoBinary, NoBuild, SetupPyStrategy,
|
||||
};
|
||||
|
||||
use crate::commands::ExitStatus;
|
||||
use crate::printer::Printer;
|
||||
|
@ -213,6 +214,10 @@ async fn venv_impl(
|
|||
|
||||
for distribution in resolution
|
||||
.distributions()
|
||||
.filter_map(|dist| match dist {
|
||||
ResolvedDist::Installable(dist) => Some(dist),
|
||||
ResolvedDist::Installed(_) => None,
|
||||
})
|
||||
.sorted_unstable_by(|a, b| a.name().cmp(b.name()).then(a.version().cmp(&b.version())))
|
||||
{
|
||||
writeln!(
|
||||
|
|
|
@ -16,13 +16,14 @@ use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl};
|
|||
use uv_auth::KeyringProvider;
|
||||
use uv_cache::{Cache, CacheArgs, Refresh};
|
||||
use uv_client::Connectivity;
|
||||
use uv_installer::{NoBinary, Reinstall};
|
||||
use uv_interpreter::PythonVersion;
|
||||
use uv_normalize::{ExtraName, PackageName};
|
||||
use uv_requirements::{ExtrasSpecification, RequirementsSource};
|
||||
use uv_resolver::{AnnotationStyle, DependencyMode, PreReleaseMode, ResolutionMode};
|
||||
use uv_types::NoBinary;
|
||||
use uv_types::{
|
||||
ConfigSettingEntry, ConfigSettings, NoBuild, PackageNameSpecifier, SetupPyStrategy, Upgrade,
|
||||
ConfigSettingEntry, ConfigSettings, NoBuild, PackageNameSpecifier, Reinstall, SetupPyStrategy,
|
||||
Upgrade,
|
||||
};
|
||||
|
||||
use crate::commands::{extra_name_with_clap_error, ExitStatus, ListFormat, VersionFormat};
|
||||
|
@ -1689,7 +1690,7 @@ async fn run() -> Result<ExitStatus> {
|
|||
upgrade,
|
||||
index_urls,
|
||||
args.keyring_provider,
|
||||
&reinstall,
|
||||
reinstall,
|
||||
args.link_mode,
|
||||
args.compile,
|
||||
setup_py,
|
||||
|
|
|
@ -6399,6 +6399,115 @@ fn emit_marker_expression_pypy() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// A local version of a package shadowing a remote package is installed.
|
||||
#[test]
|
||||
fn local_version_of_remote_package() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
let root_path = context.workspace_root.join("scripts/packages");
|
||||
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("anyio")?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.compile()
|
||||
.arg(requirements_in.canonicalize()?), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
|
||||
anyio==4.3.0
|
||||
idna==3.6
|
||||
# via anyio
|
||||
sniffio==1.3.1
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###);
|
||||
|
||||
// Actually install the local dependency
|
||||
// TODO(zanieb): We should have an `install` utility on the context instead of doing this
|
||||
let mut command = Command::new(get_bin());
|
||||
command
|
||||
.arg("pip")
|
||||
.arg("install")
|
||||
.arg(root_path.join("anyio_local"))
|
||||
.arg("--cache-dir")
|
||||
.arg(context.cache_dir.path())
|
||||
.arg("--exclude-newer")
|
||||
.arg(EXCLUDE_NEWER)
|
||||
.env("VIRTUAL_ENV", context.venv.as_os_str())
|
||||
.current_dir(context.temp_dir.path());
|
||||
if cfg!(all(windows, debug_assertions)) {
|
||||
command.env("UV_STACK_SIZE", (8 * 1024 * 1024).to_string());
|
||||
}
|
||||
uv_snapshot!(
|
||||
context.filters(),
|
||||
command, @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// The local version should _not_ be included in the resolution
|
||||
uv_snapshot!(context.filters(), context.compile()
|
||||
.arg(requirements_in.canonicalize()?), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
|
||||
anyio==4.3.0
|
||||
idna==3.6
|
||||
# via anyio
|
||||
sniffio==1.3.1
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###);
|
||||
|
||||
// Write a lock file with the local version
|
||||
let requirements_txt = context.temp_dir.child("requirements.txt");
|
||||
requirements_txt.write_str(&indoc::formatdoc! {r"
|
||||
anyio @ {workspace_root}/scripts/packages/anyio_local
|
||||
",
|
||||
workspace_root = context.workspace_root.simplified_display(),
|
||||
})?;
|
||||
|
||||
// The local version is _still_ excluded from the resolution
|
||||
// `uv pip compile` does not have access to an environment and cannot consider installed packages
|
||||
// We may want to allow the lock file to be preserved in this case in the future, but right now
|
||||
// we require the URL to always be in the input file.
|
||||
uv_snapshot!(context.filters(), context.compile()
|
||||
.arg(requirements_in.canonicalize()?)
|
||||
.arg("--output-file")
|
||||
.arg(requirements_txt.canonicalize()?), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt
|
||||
anyio==4.3.0
|
||||
idna==3.6
|
||||
# via anyio
|
||||
sniffio==1.3.1
|
||||
# via anyio
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pendulum_no_tzdata_on_windows() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
|
|
@ -60,7 +60,7 @@ fn command_without_exclude_newer(context: &TestContext) -> Command {
|
|||
if cfg!(all(windows, debug_assertions)) {
|
||||
// TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
|
||||
// default windows stack of 1MB
|
||||
command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string());
|
||||
command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string());
|
||||
}
|
||||
|
||||
command
|
||||
|
@ -91,7 +91,7 @@ fn missing_requirements_txt() {
|
|||
let context = TestContext::new("3.12");
|
||||
let requirements_txt = context.temp_dir.child("requirements.txt");
|
||||
|
||||
uv_snapshot!(command(&context)
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("-r")
|
||||
.arg("requirements.txt")
|
||||
.arg("--strict"), @r###"
|
||||
|
@ -3031,3 +3031,613 @@ fn deptry_gitignore() {
|
|||
.assert_command("import deptry_reproducer.foo")
|
||||
.success();
|
||||
}
|
||||
|
||||
/// Reinstall an installed package with `--no-index`
|
||||
#[test]
|
||||
fn reinstall_no_index() {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
// Install anyio
|
||||
uv_snapshot!(command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--strict"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
Downloaded 3 packages in [TIME]
|
||||
Installed 3 packages in [TIME]
|
||||
+ anyio==4.3.0
|
||||
+ idna==3.6
|
||||
+ sniffio==1.3.1
|
||||
"###
|
||||
);
|
||||
|
||||
// Install anyio again
|
||||
uv_snapshot!(command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--no-index")
|
||||
.arg("--strict"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
// Reinstall
|
||||
// We should not consider the already installed package as a source and
|
||||
// should attempt to pull from the index
|
||||
uv_snapshot!(command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--no-index")
|
||||
.arg("--reinstall")
|
||||
.arg("--strict"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because anyio was not found in the provided package locations and you
|
||||
require anyio, we can conclude that the requirements are unsatisfiable.
|
||||
|
||||
hint: Packages were unavailable because index lookups were disabled
|
||||
and no additional package locations were provided (try: `--find-links
|
||||
<uri>`)
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn already_installed_remote_dependencies() {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
// Install anyio's dependencies.
|
||||
uv_snapshot!(command(&context)
|
||||
.arg("idna")
|
||||
.arg("sniffio")
|
||||
.arg("--strict"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Downloaded 2 packages in [TIME]
|
||||
Installed 2 packages in [TIME]
|
||||
+ idna==3.6
|
||||
+ sniffio==1.3.1
|
||||
"###
|
||||
);
|
||||
|
||||
// Install anyio.
|
||||
uv_snapshot!(command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--strict"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ anyio==4.3.0
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
/// Install an editable package that depends on a previously installed editable package.
|
||||
#[test]
|
||||
fn already_installed_dependent_editable() {
|
||||
let context = TestContext::new("3.12");
|
||||
let root_path = context
|
||||
.workspace_root
|
||||
.join("scripts/packages/dependent_editables");
|
||||
|
||||
// Install the first editable
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_editable")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable)
|
||||
"###
|
||||
);
|
||||
|
||||
// Install the second editable which depends on the first editable
|
||||
// The already installed first editable package should satisfy the requirement
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_editable"))
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ second-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/second_editable)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request install of the first editable by full path again
|
||||
// We should audit the installed package
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_editable")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation of the first package during install of the second
|
||||
// It's not available on an index and the user has not specified the path so we fail
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_editable"))
|
||||
.arg("--reinstall-package")
|
||||
.arg("first-editable")
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because first-editable was not found in the provided package locations
|
||||
and second-editable==0.0.1 depends on first-editable, we can conclude
|
||||
that second-editable==0.0.1 cannot be used.
|
||||
And because only second-editable==0.0.1 is available and you
|
||||
require second-editable, we can conclude that the requirements are
|
||||
unsatisfiable.
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation of the first package
|
||||
// We include it in the install command with a full path so we should succeed
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_editable"))
|
||||
.arg("--reinstall-package")
|
||||
.arg("first-editable"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
- first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable)
|
||||
+ first-editable==0.0.1 (from file://[WORKSPACE]/scripts/packages/dependent_editables/first_editable)
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
/// Install a local package that depends on a previously installed local package.
|
||||
#[test]
|
||||
fn already_installed_local_path_dependent() {
|
||||
let context = TestContext::new("3.12");
|
||||
let root_path = context
|
||||
.workspace_root
|
||||
.join("scripts/packages/dependent_locals");
|
||||
|
||||
// Install the first local
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_local")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Install the second local which depends on the first local
|
||||
// The already installed first local package should satisfy the requirement
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_local"))
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ second-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/second_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request install of the first local by full path again
|
||||
// We should audit the installed package
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_local")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation of the first package during install of the second
|
||||
// It's not available on an index and the user has not specified the path so we fail
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_local"))
|
||||
.arg("--reinstall-package")
|
||||
.arg("first-local")
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because first-local was not found in the provided package locations
|
||||
and second-local==0.1.0 depends on first-local, we can conclude that
|
||||
second-local==0.1.0 cannot be used.
|
||||
And because only second-local==0.1.0 is available and you require
|
||||
second-local, we can conclude that the requirements are unsatisfiable.
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation of the first package
|
||||
// We include it in the install command with a full path so we succeed
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_local"))
|
||||
.arg(root_path.join("first_local"))
|
||||
.arg("--reinstall-package")
|
||||
.arg("first-local"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
- first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local)
|
||||
+ first-local==0.1.0 (from file://[WORKSPACE]/scripts/packages/dependent_locals/first_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request upgrade of the first package
|
||||
// It's not available on an index and the user has not specified the path so we fail
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("second_local"))
|
||||
.arg("--upgrade-package")
|
||||
.arg("first-local")
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because first-local was not found in the provided package locations
|
||||
and second-local==0.1.0 depends on first-local, we can conclude that
|
||||
second-local==0.1.0 cannot be used.
|
||||
And because only second-local==0.1.0 is available and you require
|
||||
second-local, we can conclude that the requirements are unsatisfiable.
|
||||
"###
|
||||
);
|
||||
|
||||
// Request upgrade of the first package
|
||||
// A full path is specified and there's nothing to upgrade to so we should just audit
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("first_local"))
|
||||
.arg(root_path.join("second_local"))
|
||||
.arg("--upgrade-package")
|
||||
.arg("first-local")
|
||||
// Disable the index to guard this test against dependency confusion attacks
|
||||
.arg("--no-index")
|
||||
.arg("--find-links")
|
||||
.arg("https://raw.githubusercontent.com/astral-sh/packse/0.3.12/vendor/links.html"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Audited 2 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
/// A local version of a package shadowing a remote package is installed.
|
||||
#[test]
|
||||
fn already_installed_local_version_of_remote_package() {
|
||||
let context = TestContext::new("3.12");
|
||||
let root_path = context.workspace_root.join("scripts/packages");
|
||||
|
||||
// Install the local anyio first
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("anyio_local")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Install again without specifying a local path — this should not pull from the index
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("anyio"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
// Request install with a different version
|
||||
// We should attempt to pull from the index since the installed version does not match
|
||||
// but we disable it here to preserve this dependency for future tests
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("anyio==4.2.0")
|
||||
.arg("--no-index"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because anyio==4.2.0 was not found in the provided package locations
|
||||
and you require anyio==4.2.0, we can conclude that the requirements
|
||||
are unsatisfiable.
|
||||
|
||||
hint: Packages were unavailable because index lookups were disabled
|
||||
and no additional package locations were provided (try: `--find-links
|
||||
<uri>`)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation with the local version segment — this should fail since it is not available
|
||||
// in the index and the path was not provided
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("anyio==4.3.0+foo")
|
||||
.arg("--reinstall"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because there is no version of anyio==4.3.0+foo and you require
|
||||
anyio==4.3.0+foo, we can conclude that the requirements are
|
||||
unsatisfiable.
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstall with the full path, this should reinstall from the path
|
||||
// and not pull from the index
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("anyio_local"))
|
||||
.arg("--reinstall")
|
||||
.arg("anyio"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
- anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request reinstallation with just the name, this should pull from the index
|
||||
// and replace the path dependency
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--reinstall"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
Downloaded 3 packages in [TIME]
|
||||
Installed 3 packages in [TIME]
|
||||
- anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
+ anyio==4.3.0
|
||||
+ idna==3.6
|
||||
+ sniffio==1.3.1
|
||||
"###
|
||||
);
|
||||
|
||||
// Install the local anyio again so we can test upgrades
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg(root_path.join("anyio_local")), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
- anyio==4.3.0
|
||||
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
|
||||
"###
|
||||
);
|
||||
|
||||
// Request upgrade with just the name
|
||||
// We shouldn't pull from the index because the local version is "newer"
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("anyio")
|
||||
.arg("--upgrade"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 3 packages in [TIME]
|
||||
Audited 3 packages in [TIME]
|
||||
"###
|
||||
);
|
||||
|
||||
// Install something that depends on anyio
|
||||
// We shouldn't overwrite our local version with the remote anyio here
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("httpx"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 7 packages in [TIME]
|
||||
Downloaded 4 packages in [TIME]
|
||||
Installed 4 packages in [TIME]
|
||||
+ certifi==2024.2.2
|
||||
+ h11==0.14.0
|
||||
+ httpcore==1.0.4
|
||||
+ httpx==0.27.0
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
/// Install a package from a remote URL
|
||||
#[test]
|
||||
#[cfg(feature = "git")]
|
||||
fn already_installed_remote_url() {
|
||||
let context = TestContext::new("3.8");
|
||||
|
||||
// First, install from the remote URL
|
||||
uv_snapshot!(context.filters(), command(&context).arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Downloaded 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
|
||||
"###);
|
||||
|
||||
context.assert_installed("uv_public_pypackage", "0.1.0");
|
||||
|
||||
// Request installation again with just the name
|
||||
// We should just audit the URL package since it fulfills this requirement
|
||||
uv_snapshot!(
|
||||
command(&context).arg("uv-public-pypackage"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###);
|
||||
|
||||
// Request reinstallation
|
||||
// We should fail since the URL was not provided
|
||||
uv_snapshot!(
|
||||
command(&context)
|
||||
.arg("uv-public-pypackage")
|
||||
.arg("--no-index")
|
||||
.arg("--reinstall"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because uv-public-pypackage was not found in the provided package
|
||||
locations and you require uv-public-pypackage, we can conclude that the
|
||||
requirements are unsatisfiable.
|
||||
|
||||
hint: Packages were unavailable because index lookups were disabled
|
||||
and no additional package locations were provided (try: `--find-links
|
||||
<uri>`)
|
||||
"###);
|
||||
|
||||
// Request installation again with just the full URL
|
||||
// We should just audit the existing package
|
||||
uv_snapshot!(
|
||||
command(&context).arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Audited 1 package in [TIME]
|
||||
"###);
|
||||
|
||||
// Request reinstallation with the full URL
|
||||
// We should reinstall successfully
|
||||
uv_snapshot!(
|
||||
command(&context)
|
||||
.arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage")
|
||||
.arg("--reinstall"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
- uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
|
||||
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
|
||||
"###);
|
||||
|
||||
// Request installation again with a different version
|
||||
// We should attempt to pull from the index since the local version does not match
|
||||
uv_snapshot!(
|
||||
command(&context).arg("uv-public-pypackage==0.2.0").arg("--no-index"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because uv-public-pypackage==0.2.0 was not found in the provided package
|
||||
locations and you require uv-public-pypackage==0.2.0, we can conclude
|
||||
that the requirements are unsatisfiable.
|
||||
|
||||
hint: Packages were unavailable because index lookups were disabled
|
||||
and no additional package locations were provided (try: `--find-links
|
||||
<uri>`)
|
||||
"###);
|
||||
}
|
||||
|
|
|
@ -13,9 +13,7 @@ use indoc::indoc;
|
|||
use predicates::Predicate;
|
||||
use url::Url;
|
||||
|
||||
use common::{
|
||||
create_bin_with_executables, create_venv, uv_snapshot, venv_to_interpreter, INSTA_FILTERS,
|
||||
};
|
||||
use common::{create_bin_with_executables, create_venv, uv_snapshot, venv_to_interpreter};
|
||||
use uv_fs::Simplified;
|
||||
|
||||
use crate::common::{copy_dir_all, get_bin, TestContext};
|
||||
|
@ -3011,13 +3009,7 @@ requires-python = "<=3.5"
|
|||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?;
|
||||
|
||||
// In addition to the standard filters, remove the temporary directory from the snapshot.
|
||||
let filters: Vec<_> = [(r"\(from file://.*\)", "(from file://[TEMP_DIR])")]
|
||||
.into_iter()
|
||||
.chain(INSTA_FILTERS.to_vec())
|
||||
.collect();
|
||||
|
||||
uv_snapshot!(filters, command(&context)
|
||||
uv_snapshot!(context.filters(), command(&context)
|
||||
.arg("requirements.in"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
|
|
0
scripts/packages/anyio_local/.gitignore
vendored
Normal file
0
scripts/packages/anyio_local/anyio/__init__.py
Normal file
16
scripts/packages/anyio_local/pyproject.toml
Normal file
|
@ -0,0 +1,16 @@
|
|||
[project]
|
||||
name = "anyio"
|
||||
description = 'A dummy package shadowing `anyio`.'
|
||||
requires-python = ">=3.7"
|
||||
license = {text = "MIT"}
|
||||
keywords = []
|
||||
authors = [
|
||||
{ name = "Astral Software Inc.", email = "hey@astral.sh" },
|
||||
]
|
||||
classifiers = []
|
||||
dependencies = []
|
||||
version = "4.3.0+foo"
|
||||
|
||||
[build-system]
|
||||
requires = ["flit_core>=3.4,<4"]
|
||||
build-backend = "flit_core.buildapi"
|
2
scripts/packages/dependent_editables/first_editable/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
# Artifacts from the build process.
|
||||
*.egg-info/
|
|
@ -0,0 +1,3 @@
|
|||
from setuptools import setup
|
||||
|
||||
setup(name="first-editable", version="0.0.1", install_requires=[])
|
2
scripts/packages/dependent_editables/second_editable/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
# Artifacts from the build process.
|
||||
*.egg-info/
|
|
@ -0,0 +1,9 @@
|
|||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
name="second-editable",
|
||||
version="0.0.1",
|
||||
install_requires=[
|
||||
"first-editable",
|
||||
],
|
||||
)
|
2
scripts/packages/dependent_locals/first_local/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
*.pyc
|
||||
__pycache__
|
0
scripts/packages/dependent_locals/first_local/README.md
Normal file
17
scripts/packages/dependent_locals/first_local/pyproject.toml
Normal file
|
@ -0,0 +1,17 @@
|
|||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "first-local"
|
||||
description = 'A simple package with no dependencies.'
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.7"
|
||||
license = "MIT"
|
||||
keywords = []
|
||||
authors = [
|
||||
{ name = "Astral Software Inc.", email = "hey@astral.sh" },
|
||||
]
|
||||
classifiers = []
|
||||
dependencies = []
|
||||
version = "0.1.0"
|
2
scripts/packages/dependent_locals/second_local/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
# Artifacts from the build process.
|
||||
*.egg-info/
|
0
scripts/packages/dependent_locals/second_local/README.md
Normal file
|
@ -0,0 +1,17 @@
|
|||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "second-local"
|
||||
description = 'A simple package with a dependency on `first-local`.'
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.7"
|
||||
license = "MIT"
|
||||
keywords = []
|
||||
authors = [
|
||||
{ name = "Astral Software Inc.", email = "hey@astral.sh" },
|
||||
]
|
||||
classifiers = []
|
||||
dependencies = ["first-local"]
|
||||
version = "0.1.0"
|