Migrate resolver proof-of-concept to PubGrub (#97)

## Summary

This PR enables the proof-of-concept resolver to backtrack by way of
using the `pubgrub-rs` crate.

Rather than using PubGrub as a _framework_ (implementing the
`DependencyProvider` trait, letting PubGrub call us), I've instead
copied over PubGrub's primary solver hook (which is only ~100 lines or
so) and modified it for our purposes (e.g., made it async).

There's a lot to improve here, but it's a start that will let us
understand PubGrub's appropriateness for this problem space. A few
observations:

- In simple cases, the resolver is slower than our current (naive)
resolver. I think it's just that the pipelining isn't as efficient as in
the naive case, where we can just stream package and version fetches
concurrently without any bottlenecks.
- A lot of the code here relates to bridging PubGrub with our own
abstractions -- so we need a `PubGrubPackage`, a `PubGrubVersion`, etc.
This commit is contained in:
Charlie Marsh 2023-10-15 22:05:44 -04:00 committed by GitHub
parent a8d020f53c
commit 471a1d657d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
53 changed files with 12301 additions and 257 deletions

145
Cargo.lock generated
View file

@ -17,6 +17,15 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217"
dependencies = [
"const-random",
]
[[package]]
name = "ahash"
version = "0.7.6"
@ -166,7 +175,7 @@ checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"cfg-if 1.0.0",
"libc",
"miniz_oxide",
"object",
@ -299,6 +308,12 @@ dependencies = [
"libc",
]
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -413,6 +428,28 @@ dependencies = [
"windows-sys 0.45.0",
]
[[package]]
name = "const-random"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368a7a772ead6ce7e1de82bfb04c485f3db8ec744f72925af5735e29a22cc18e"
dependencies = [
"const-random-macro",
"proc-macro-hack",
]
[[package]]
name = "const-random-macro"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d7d6ab3c3a2282db210df5f02c4dab6e0a7057af0fb7ebd4070f30fe05c0ddb"
dependencies = [
"getrandom",
"once_cell",
"proc-macro-hack",
"tiny-keccak",
]
[[package]]
name = "core-foundation"
version = "0.9.3"
@ -444,7 +481,7 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -453,7 +490,7 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"crossbeam-epoch",
"crossbeam-utils",
]
@ -465,7 +502,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
dependencies = [
"autocfg",
"cfg-if",
"cfg-if 1.0.0",
"crossbeam-utils",
"memoffset",
"scopeguard",
@ -477,9 +514,15 @@ version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
@ -511,6 +554,17 @@ dependencies = [
"memchr",
]
[[package]]
name = "dashmap"
version = "3.11.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f260e2fc850179ef410018660006951c1b55b79e8087e87111a2c388994b9b5"
dependencies = [
"ahash 0.3.8",
"cfg-if 0.1.10",
"num_cpus",
]
[[package]]
name = "data-encoding"
version = "2.4.0"
@ -581,7 +635,7 @@ version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -761,7 +815,7 @@ version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"js-sys",
"libc",
"wasi",
@ -842,7 +896,7 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
"ahash",
"ahash 0.7.6",
]
[[package]]
@ -1122,7 +1176,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
@ -1402,7 +1456,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
dependencies = [
"bitflags 2.4.0",
"cfg-if",
"cfg-if 1.0.0",
"foreign-types",
"libc",
"once_cell",
@ -1478,7 +1532,7 @@ version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"instant",
"libc",
"redox_syscall 0.2.16",
@ -1492,7 +1546,7 @@ version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.3.5",
"smallvec",
@ -1644,6 +1698,12 @@ dependencies = [
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.20+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
version = "1.0.69"
@ -1662,6 +1722,14 @@ dependencies = [
"cc",
]
[[package]]
name = "pubgrub"
version = "0.2.1"
dependencies = [
"rustc-hash",
"thiserror",
]
[[package]]
name = "puffin-cli"
version = "0.0.1"
@ -1784,11 +1852,13 @@ dependencies = [
"pep508_rs",
"platform-host",
"platform-tags",
"pubgrub",
"puffin-client",
"puffin-package",
"thiserror",
"tokio",
"tracing",
"waitmap",
"wheel-filename",
]
@ -1798,7 +1868,7 @@ version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e681a6cfdc4adcc93b4d3cf993749a4552018ee0a9b65fc0ccfad74352c72a38"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"indoc 1.0.9",
"libc",
"memoffset",
@ -1972,7 +2042,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7e3e017e993f86feeddf8a7fb609ca49f89082309e328e27aefd4a25bb317a4"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"ioctl-sys",
"windows 0.51.1",
]
@ -2132,6 +2202,12 @@ version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustix"
version = "0.38.18"
@ -2279,7 +2355,7 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
]
@ -2290,7 +2366,7 @@ version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
]
@ -2301,7 +2377,7 @@ version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
]
@ -2380,7 +2456,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce"
dependencies = [
"cc",
"cfg-if",
"cfg-if 1.0.0",
"libc",
"psm",
"winapi",
@ -2456,7 +2532,7 @@ version = "3.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"fastrand",
"redox_syscall 0.3.5",
"rustix",
@ -2478,7 +2554,7 @@ version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54c25e2cb8f5fcd7318157634e8838aa6f7e4715c96637f969fabaccd1ef5462"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"proc-macro-error",
"proc-macro2",
"quote",
@ -2533,7 +2609,7 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"once_cell",
]
@ -2565,6 +2641,15 @@ dependencies = [
"time-core",
]
[[package]]
name = "tiny-keccak"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
dependencies = [
"crunchy",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@ -2656,7 +2741,7 @@ version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"log",
"pin-project-lite",
"tracing-attributes",
@ -2821,6 +2906,16 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "waitmap"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28491611b6b9a0b9f027be139a4be792b13a20780100dd8b054d44dbf596d52b"
dependencies = [
"dashmap",
"smallvec",
]
[[package]]
name = "walkdir"
version = "2.4.0"
@ -2852,7 +2947,7 @@ version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"wasm-bindgen-macro",
]
@ -2877,7 +2972,7 @@ version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
@ -3167,7 +3262,7 @@ version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"windows-sys 0.48.0",
]

View file

@ -1,5 +1,6 @@
[workspace]
members = ["crates/*"]
exclude = ["vendor/pubgrub"]
resolver = "2"
[workspace.package]
@ -58,6 +59,7 @@ tracing-tree = { version = "0.2.5" }
unicode-width = { version = "0.1.8" }
unscanny = { version = "0.1.0" }
url = { version = "2.4.1" }
waitmap = { version = "1.1.0" }
walkdir = { version = "2.4.0" }
which = { version = "4.4.2" }
zip = { version = "0.6.6", default-features = false, features = ["deflate"] }

View file

@ -264,7 +264,10 @@ impl Requirement {
#[allow(clippy::needless_pass_by_value)]
#[pyo3(name = "evaluate_markers")]
pub fn py_evaluate_markers(&self, env: &MarkerEnvironment, extras: Vec<String>) -> bool {
self.evaluate_markers(env, &extras)
self.evaluate_markers(
env,
&extras.iter().map(String::as_str).collect::<Vec<&str>>(),
)
}
/// Returns whether the requirement would be satisfied, independent of environment markers, i.e.
@ -320,12 +323,9 @@ impl Requirement {
}
/// Returns whether the markers apply for the given environment
pub fn evaluate_markers(&self, env: &MarkerEnvironment, extras: &[String]) -> bool {
pub fn evaluate_markers(&self, env: &MarkerEnvironment, extras: &[&str]) -> bool {
if let Some(marker) = &self.marker {
marker.evaluate(
env,
&extras.iter().map(String::as_str).collect::<Vec<&str>>(),
)
marker.evaluate(env, extras)
} else {
true
}

View file

@ -159,7 +159,6 @@ impl Os {
} else {
return Err(PlatformError::OsVersionDetectionError("Couldn't detect neither glibc version nor musl libc version, at least one of which is required".to_string()));
};
trace!("libc: {}", linux);
Ok(linux)
}
}

View file

@ -13,7 +13,6 @@ use puffin_client::PypiClientBuilder;
use puffin_interpreter::PythonExecutable;
use puffin_package::requirements_txt::RequirementsTxt;
use crate::commands::reporters::ResolverReporter;
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
@ -62,14 +61,8 @@ pub(crate) async fn compile(
};
// Resolve the dependencies.
let resolver = puffin_resolver::Resolver::new(markers, &tags, &client)
.with_reporter(ResolverReporter::from(printer));
let resolution = resolver
.resolve(
requirements.iter(),
puffin_resolver::ResolveFlags::default(),
)
.await?;
let resolver = puffin_resolver::Resolver::new(requirements, markers, &tags, &client);
let resolution = resolver.resolve().await?;
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(

View file

@ -6,11 +6,11 @@ use puffin_package::package_name::PackageName;
use crate::printer::Printer;
#[derive(Debug)]
pub(crate) struct ResolverReporter {
pub(crate) struct WheelFinderReporter {
progress: ProgressBar,
}
impl From<Printer> for ResolverReporter {
impl From<Printer> for WheelFinderReporter {
fn from(printer: Printer) -> Self {
let progress = ProgressBar::with_draw_target(None, printer.target());
progress.set_message("Resolving dependencies...");
@ -21,7 +21,7 @@ impl From<Printer> for ResolverReporter {
}
}
impl ResolverReporter {
impl WheelFinderReporter {
#[must_use]
pub(crate) fn with_length(self, length: u64) -> Self {
self.progress.set_length(length);
@ -29,20 +29,14 @@ impl ResolverReporter {
}
}
impl puffin_resolver::Reporter for ResolverReporter {
fn on_dependency_added(&self) {
self.progress.inc_length(1);
}
fn on_resolve_progress(&self, package: &puffin_resolver::PinnedPackage) {
self.progress.set_message(format!(
"{}=={}",
package.metadata.name, package.metadata.version
));
impl puffin_resolver::Reporter for WheelFinderReporter {
fn on_progress(&self, package: &puffin_resolver::PinnedPackage) {
self.progress
.set_message(format!("{}=={}", package.name(), package.version()));
self.progress.inc(1);
}
fn on_resolve_complete(&self) {
fn on_complete(&self) {
self.progress.finish_and_clear();
}
}

View file

@ -16,7 +16,7 @@ use puffin_package::package_name::PackageName;
use puffin_package::requirements_txt::RequirementsTxt;
use crate::commands::reporters::{
DownloadReporter, InstallReporter, ResolverReporter, UnzipReporter,
DownloadReporter, InstallReporter, UnzipReporter, WheelFinderReporter,
};
use crate::commands::{elapsed, ExitStatus};
use crate::printer::Printer;
@ -59,7 +59,6 @@ pub(crate) async fn sync(
);
// Determine the current environment markers.
let markers = python.markers();
let tags = Tags::from_env(python.platform(), python.simple_version())?;
// Index all the already-installed packages in site-packages.
@ -143,19 +142,17 @@ pub(crate) async fn sync(
let resolution = if uncached.is_empty() {
puffin_resolver::Resolution::default()
} else {
let resolver = puffin_resolver::Resolver::new(markers, &tags, &client)
.with_reporter(ResolverReporter::from(printer).with_length(uncached.len() as u64));
let resolution = resolver
.resolve(uncached.iter(), puffin_resolver::ResolveFlags::NO_DEPS)
.await?;
let wheel_finder = puffin_resolver::WheelFinder::new(&tags, &client)
.with_reporter(WheelFinderReporter::from(printer).with_length(uncached.len() as u64));
let resolution = wheel_finder.resolve(&uncached).await?;
let s = if uncached.len() == 1 { "" } else { "s" };
let s = if resolution.len() == 1 { "" } else { "s" };
writeln!(
printer,
"{}",
format!(
"Resolved {} in {}",
format!("{} package{}", uncached.len(), s).bold(),
format!("{} package{}", resolution.len(), s).bold(),
elapsed(start.elapsed())
)
.dimmed()

View file

@ -27,7 +27,7 @@ impl PypiClient {
url.set_query(Some("format=application/vnd.pypi.simple.v1+json"));
trace!(
"fetching metadata for {} from {}",
"Fetching metadata for {} from {}",
package_name.as_ref(),
url
);
@ -74,7 +74,7 @@ impl PypiClient {
self.proxy.join(file.url.parse::<Url>()?.path())?
};
trace!("fetching file {} from {}", file.filename, url);
trace!("Fetching file {} from {}", file.filename, url);
// Fetch from the registry.
let text = self.file_impl(&file.filename, &url).await?;
@ -135,7 +135,6 @@ pub struct SimpleJson {
pub versions: Vec<String>,
}
// TODO(charlie): Can we rename this? What does this look like for source distributions?
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct File {

View file

@ -9,6 +9,12 @@ use crate::dist_info_name::DistInfoName;
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct PackageName(String);
impl From<&PackageName> for PackageName {
    /// Convert a borrowed `PackageName` into an owned copy by cloning.
    fn from(name: &PackageName) -> Self {
        name.to_owned()
    }
}
impl Display for PackageName {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(f)

View file

@ -10,20 +10,23 @@ authors = { workspace = true }
license = { workspace = true }
[dependencies]
pep440_rs = { path = "../pep440-rs" }
pep508_rs = { path = "../pep508-rs" }
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
pubgrub = { path = "../../vendor/pubgrub" }
puffin-client = { path = "../puffin-client" }
puffin-package = { path = "../puffin-package" }
platform-host = { path = "../platform-host" }
wheel-filename = { path = "../wheel-filename" }
anyhow = { workspace = true }
bitflags = { workspace = true }
futures = { workspace = true }
pep440_rs = { path = "../pep440-rs" }
pep508_rs = { path = "../pep508-rs" }
once_cell = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
waitmap = { workspace = true }
[dev-dependencies]
tokio = { version = "1.33.0" }
once_cell = { version = "1.18.0" }

View file

@ -2,6 +2,9 @@ use thiserror::Error;
use pep508_rs::Requirement;
use crate::pubgrub::package::PubGrubPackage;
use crate::pubgrub::version::PubGrubVersion;
#[derive(Error, Debug)]
pub enum ResolveError {
#[error("Failed to find a version of {0} that satisfies the requirement")]
@ -12,6 +15,9 @@ pub enum ResolveError {
#[error(transparent)]
TrySend(#[from] futures::channel::mpsc::SendError),
#[error(transparent)]
PubGrub(#[from] pubgrub::error::PubGrubError<PubGrubPackage, PubGrubVersion>),
}
impl<T> From<futures::channel::mpsc::TrySendError<T>> for ResolveError {

View file

@ -1,6 +1,9 @@
pub use resolution::{PinnedPackage, Resolution};
pub use resolver::{Reporter, ResolveFlags, Resolver};
pub use resolver::Resolver;
pub use wheel_finder::{Reporter, WheelFinder};
mod error;
mod pubgrub;
mod resolution;
mod resolver;
mod wheel_finder;

View file

@ -0,0 +1,81 @@
use anyhow::Result;
use pubgrub::range::Range;
use pep508_rs::{MarkerEnvironment, Requirement};
use puffin_package::dist_info_name::DistInfoName;
use puffin_package::package_name::PackageName;
use crate::pubgrub::package::PubGrubPackage;
use crate::pubgrub::specifier::PubGrubSpecifier;
use crate::pubgrub::version::{PubGrubVersion, MAX_VERSION};
pub(crate) mod package;
mod specifier;
pub(crate) mod version;
/// Convert a set of requirements to a set of `PubGrub` packages and ranges.
///
/// Requirements whose environment markers do not apply under `env` (given the
/// optionally-active `extra`) are filtered out. Each surviving requirement
/// yields one `(package, range)` pair for the base package, plus one pair per
/// requested extra (modeled as a virtual `PubGrubPackage::Package` with the
/// extra set).
pub(crate) fn iter_requirements<'a>(
    requirements: impl Iterator<Item = &'a Requirement> + 'a,
    extra: Option<&'a DistInfoName>,
    env: &'a MarkerEnvironment,
) -> impl Iterator<Item = (PubGrubPackage, Range<PubGrubVersion>)> + 'a {
    requirements
        .filter(move |requirement| {
            // Evaluate the markers against the active extra (if any) without
            // allocating an intermediate `Vec`: a one-element array coerces to
            // the `&[&str]` slice that `evaluate_markers` expects.
            match extra {
                Some(extra) => requirement.evaluate_markers(env, &[extra.as_ref()]),
                None => requirement.evaluate_markers(env, &[]),
            }
        })
        .flat_map(|requirement| {
            let normalized_name = PackageName::normalize(&requirement.name);
            let package = PubGrubPackage::Package(normalized_name.clone(), None);
            // A failure here indicates an unrepresentable specifier; surface
            // it with context rather than a bare unwrap.
            let versions = version_range(requirement.version_or_url.as_ref())
                .expect("failed to convert version specifier to a PubGrub range");
            std::iter::once((package, versions)).chain(
                requirement
                    .extras
                    .clone()
                    .into_iter()
                    .flatten()
                    .map(move |extra| {
                        // Virtual package for the extra (e.g., `black[colorama]`),
                        // pinned to the same version range as the base package.
                        let package = PubGrubPackage::Package(
                            normalized_name.clone(),
                            Some(DistInfoName::normalize(extra)),
                        );
                        let versions = version_range(requirement.version_or_url.as_ref())
                            .expect("failed to convert version specifier to a PubGrub range");
                        (package, versions)
                    }),
            )
        })
}
/// Convert a PEP 508 specifier to a `PubGrub` range.
fn version_range(specifiers: Option<&pep508_rs::VersionOrUrl>) -> Result<Range<PubGrubVersion>> {
    // Requirements with no specifier at all, or with a URL instead of a
    // version specifier, accept any version.
    let Some(pep508_rs::VersionOrUrl::VersionSpecifier(specifiers)) = specifiers else {
        return Ok(Range::any());
    };

    // Intersect the ranges contributed by each individual specifier clause.
    let mut combined = Range::any();
    for specifier in specifiers.iter() {
        let clause = PubGrubSpecifier::try_from(specifier)?
            .into_iter()
            .fold(Range::none(), |union, segment| {
                // A segment reaching the sentinel maximum is unbounded above,
                // so map it to `higher_than` rather than `between`.
                union.union(&if segment.end < *MAX_VERSION {
                    Range::between(segment.start, segment.end)
                } else {
                    Range::higher_than(segment.start)
                })
            });
        combined = combined.intersection(&clause);
    }
    Ok(combined)
}

View file

@ -0,0 +1,27 @@
use puffin_package::dist_info_name::DistInfoName;
use puffin_package::package_name::PackageName;
/// A PubGrub-compatible wrapper around a "Python package", with two notable characteristics:
///
/// 1. Includes a [`PubGrubPackage::Root`] variant, to satisfy `PubGrub`'s requirement that a
///    resolution starts from a single root.
/// 2. Uses the same strategy as pip and posy to handle extras: for each extra, we create a virtual
///    package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g.,
///    `black`). We then discard the virtual packages at the end of the resolution process.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PubGrubPackage {
    /// The synthetic root of the resolution, onto which the user's direct
    /// requirements are attached.
    Root,
    /// A concrete package, optionally qualified by an extra (the virtual
    /// `name[extra]` package described above).
    Package(PackageName, Option<DistInfoName>),
}
impl std::fmt::Display for PubGrubPackage {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PubGrubPackage::Root => write!(f, "<root>"),
PubGrubPackage::Package(name, None) => write!(f, "{name}"),
PubGrubPackage::Package(name, Some(extra)) => {
write!(f, "{name}[{extra}]")
}
}
}
}

View file

@ -0,0 +1,159 @@
use std::ops::Range;
use anyhow::Result;
use pep440_rs::{Operator, VersionSpecifier};
use crate::pubgrub::version::{PubGrubVersion, MAX_VERSION, MIN_VERSION};
/// A range of versions that can be used to satisfy a requirement.
///
/// Internally, a list of half-open `start..end` ranges; a version satisfies
/// the specifier if it falls in any one of them.
#[derive(Debug)]
pub(crate) struct PubGrubSpecifier(Vec<Range<PubGrubVersion>>);
impl IntoIterator for PubGrubSpecifier {
type Item = Range<PubGrubVersion>;
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl TryFrom<&VersionSpecifier> for PubGrubSpecifier {
    type Error = anyhow::Error;

    /// Convert a PEP 508 specifier to a `PubGrub`-compatible version range.
    ///
    /// Each PEP 440 operator maps to one or two half-open `start..end` ranges,
    /// using `MIN_VERSION`/`MAX_VERSION` as sentinels where a side is
    /// unbounded. Only `~=` can fail (it needs at least two release segments).
    fn try_from(specifier: &VersionSpecifier) -> Result<Self> {
        let ranges = match specifier.operator() {
            // `==V`: exactly this version. The half-open range
            // `[V, V.next())` admits only `V` itself.
            Operator::Equal => {
                let version = PubGrubVersion::from(specifier.version().clone());
                vec![version.clone()..version.next()]
            }
            // `===V`: treated identically to `==` here; no separate
            // arbitrary-equality (string comparison) handling.
            Operator::ExactEqual => {
                let version = PubGrubVersion::from(specifier.version().clone());
                vec![version.clone()..version.next()]
            }
            // `!=V`: everything except `[V, V.next())`, expressed as the two
            // ranges on either side of it.
            Operator::NotEqual => {
                let version = PubGrubVersion::from(specifier.version().clone());
                vec![
                    MIN_VERSION.clone()..version.clone(),
                    version.next()..MAX_VERSION.clone(),
                ]
            }
            // `~=V`: compatible release. Per PEP 440, `~=X.Y.Z` means
            // `>=X.Y.Z, ==X.Y.*`: drop the final release segment and bump the
            // new last segment to form the exclusive upper bound. `dev: Some(0)`
            // on the bound excludes pre-/dev-releases of the bound itself.
            Operator::TildeEqual => {
                let [rest @ .., last, _] = specifier.version().release.as_slice() else {
                    return Err(anyhow::anyhow!(
                        "~= operator requires at least two release segments"
                    ));
                };
                let upper = PubGrubVersion::from(pep440_rs::Version {
                    dev: Some(0),
                    epoch: specifier.version().epoch,
                    local: None,
                    post: None,
                    pre: None,
                    release: rest
                        .iter()
                        .chain(std::iter::once(&(last + 1)))
                        .copied()
                        .collect(),
                });
                let lower = PubGrubVersion::from(specifier.version().clone());
                vec![lower..upper]
            }
            Operator::LessThan => {
                // Per PEP 440: "The exclusive ordered comparison <V MUST NOT allow a pre-release of
                // the specified version unless the specified version is itself a pre-release."
                if specifier.version().any_prerelease() {
                    let version = PubGrubVersion::from(specifier.version().clone());
                    vec![MIN_VERSION.clone()..version.clone()]
                } else {
                    // Use `V.dev0` (post/local stripped) as the exclusive upper
                    // bound: `dev0` sorts below every pre-release of `V`, so
                    // pre-releases of `V` are excluded as required.
                    let max_version = pep440_rs::Version {
                        post: None,
                        dev: Some(0),
                        local: None,
                        ..specifier.version().clone()
                    };
                    let version = PubGrubVersion::from(max_version);
                    vec![MIN_VERSION.clone()..version.clone()]
                }
            }
            // `<=V`: everything up to and including `V` — the exclusive bound
            // `V.next()` makes `V` itself admissible.
            Operator::LessThanEqual => {
                let version = PubGrubVersion::from(specifier.version().clone());
                vec![MIN_VERSION.clone()..version.next()]
            }
            Operator::GreaterThan => {
                // Per PEP 440: "The exclusive ordered comparison >V MUST NOT allow a post-release of
                // the given version unless V itself is a post release."
                let mut low = specifier.version().clone();
                if let Some(dev) = low.dev {
                    low.dev = Some(dev + 1);
                } else if let Some(post) = low.post {
                    low.post = Some(post + 1);
                } else {
                    // Push the inclusive lower bound above every realistic
                    // post-release of V. NOTE(review): this technically admits
                    // `V.post<usize::MAX>` itself — practically unreachable.
                    low.post = Some(usize::MAX);
                }
                let version = PubGrubVersion::from(low);
                vec![version..MAX_VERSION.clone()]
            }
            // `>=V`: inclusive lower bound, unbounded above.
            Operator::GreaterThanEqual => {
                let version = PubGrubVersion::from(specifier.version().clone());
                vec![version..MAX_VERSION.clone()]
            }
            // `==V.*`: prefix match, expressed as `[V.dev0, bump(V).dev0)` —
            // i.e., everything whose release (and pre/post, if present on the
            // specifier) begins with the given prefix.
            Operator::EqualStar => {
                let low = pep440_rs::Version {
                    dev: Some(0),
                    ..specifier.version().clone()
                };
                let mut high = pep440_rs::Version {
                    dev: Some(0),
                    ..specifier.version().clone()
                };
                // Bump the most specific component present to form the
                // exclusive upper bound: post, then pre (preserving its kind),
                // then the last release segment.
                if let Some(post) = high.post {
                    high.post = Some(post + 1);
                } else if let Some(pre) = high.pre {
                    high.pre = Some(match pre {
                        (pep440_rs::PreRelease::Rc, n) => (pep440_rs::PreRelease::Rc, n + 1),
                        (pep440_rs::PreRelease::Alpha, n) => (pep440_rs::PreRelease::Alpha, n + 1),
                        (pep440_rs::PreRelease::Beta, n) => (pep440_rs::PreRelease::Beta, n + 1),
                    });
                } else {
                    *high.release.last_mut().unwrap() += 1;
                }
                vec![PubGrubVersion::from(low)..PubGrubVersion::from(high)]
            }
            // `!=V.*`: complement of `==V.*` — the two ranges on either side
            // of the prefix-match window computed exactly as above.
            Operator::NotEqualStar => {
                let low = pep440_rs::Version {
                    dev: Some(0),
                    ..specifier.version().clone()
                };
                let mut high = pep440_rs::Version {
                    dev: Some(0),
                    ..specifier.version().clone()
                };
                if let Some(post) = high.post {
                    high.post = Some(post + 1);
                } else if let Some(pre) = high.pre {
                    high.pre = Some(match pre {
                        (pep440_rs::PreRelease::Rc, n) => (pep440_rs::PreRelease::Rc, n + 1),
                        (pep440_rs::PreRelease::Alpha, n) => (pep440_rs::PreRelease::Alpha, n + 1),
                        (pep440_rs::PreRelease::Beta, n) => (pep440_rs::PreRelease::Beta, n + 1),
                    });
                } else {
                    *high.release.last_mut().unwrap() += 1;
                }
                vec![
                    MIN_VERSION.clone()..PubGrubVersion::from(low),
                    PubGrubVersion::from(high)..MAX_VERSION.clone(),
                ]
            }
        };
        Ok(Self(ranges))
    }
}

View file

@ -0,0 +1,75 @@
use std::str::FromStr;
use once_cell::sync::Lazy;
/// A PubGrub-compatible wrapper around a PEP 440 version.
///
/// The derived `Ord`/`PartialOrd` delegate to `pep440_rs::Version`, which is
/// what gives `PubGrub` its version ordering.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct PubGrubVersion(pep440_rs::Version);
impl PubGrubVersion {
/// Returns `true` if this is a pre-release version.
pub fn is_prerelease(&self) -> bool {
self.0.pre.is_some() || self.0.dev.is_some()
}
/// Returns the smallest PEP 440 version that is larger than `self`.
pub fn next(&self) -> PubGrubVersion {
let mut next = self.clone();
if let Some(dev) = &mut next.0.dev {
*dev += 1;
} else if let Some(post) = &mut next.0.post {
*post += 1;
} else {
next.0.post = Some(0);
next.0.dev = Some(0);
}
next
}
}
impl std::fmt::Display for PubGrubVersion {
    /// Delegate formatting to the underlying PEP 440 version.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}
impl pubgrub::version::Version for PubGrubVersion {
fn lowest() -> Self {
MIN_VERSION.to_owned()
}
fn bump(&self) -> Self {
self.next()
}
}
// Borrow the wrapped PEP 440 version out of a `PubGrubVersion`.
impl<'a> From<&'a PubGrubVersion> for &'a pep440_rs::Version {
    fn from(version: &'a PubGrubVersion) -> Self {
        &version.0
    }
}

// Wrap a PEP 440 version for use with `PubGrub`.
impl From<pep440_rs::Version> for PubGrubVersion {
    fn from(version: pep440_rs::Version) -> Self {
        Self(version)
    }
}

// Unwrap back into the underlying PEP 440 version.
impl From<PubGrubVersion> for pep440_rs::Version {
    fn from(version: PubGrubVersion) -> Self {
        version.0
    }
}
// The smallest representable PEP 440 version: `0a0.dev0` (dev release of the
// first alpha of version zero). Used as the open lower bound of ranges.
pub(crate) static MIN_VERSION: Lazy<PubGrubVersion> =
    Lazy::new(|| PubGrubVersion::from(pep440_rs::Version::from_str("0a0.dev0").unwrap()));

// A sentinel "maximum" version (max epoch, max release segments, max post),
// used as the open upper bound of ranges; `version_range` translates ranges
// that reach this sentinel into unbounded `higher_than` ranges.
pub(crate) static MAX_VERSION: Lazy<PubGrubVersion> = Lazy::new(|| {
    PubGrubVersion(pep440_rs::Version {
        epoch: usize::MAX,
        release: vec![usize::MAX, usize::MAX, usize::MAX],
        pre: None,
        post: Some(usize::MAX),
        dev: None,
        local: None,
    })
});

View file

@ -4,7 +4,6 @@ use std::io::Write;
use pep440_rs::Version;
use puffin_client::File;
use puffin_package::metadata::Metadata21;
use puffin_package::package_name::PackageName;
#[derive(Debug, Default)]
@ -48,25 +47,47 @@ impl Resolution {
impl std::fmt::Display for Resolution {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut first = true;
for (name, package) in self.iter() {
for (name, pin) in self.iter() {
if !first {
writeln!(f)?;
}
first = false;
write!(f, "{}=={}", name, package.version())?;
write!(f, "{}=={}", name, pin.version())?;
}
Ok(())
}
}
/// A package pinned at a specific version.
#[derive(Debug)]
pub struct PinnedPackage {
pub metadata: Metadata21,
pub file: File,
name: PackageName,
version: Version,
file: File,
}
impl PinnedPackage {
/// Initialize a new pinned package.
pub fn new(name: PackageName, version: Version, file: File) -> Self {
Self {
name,
version,
file,
}
}
/// Return the name of the pinned package.
pub fn name(&self) -> &PackageName {
&self.name
}
/// Return the version of the pinned package.
pub fn version(&self) -> &Version {
&self.metadata.version
&self.version
}
/// Return the file of the pinned package.
pub fn file(&self) -> &File {
&self.file
}
}

View file

@ -1,219 +1,518 @@
use std::collections::{BTreeMap, HashSet};
//! Given a set of requirements, find a set of compatible packages.
use std::borrow::Borrow;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::str::FromStr;
use std::sync::Arc;
use anyhow::Result;
use bitflags::bitflags;
use futures::future::Either;
use futures::{StreamExt, TryFutureExt};
use tracing::debug;
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::solver::{DependencyConstraints, Incompatibility, State};
use pubgrub::type_aliases::SelectedDependencies;
use tracing::{debug, trace};
use waitmap::WaitMap;
use pep440_rs::Version;
use pep508_rs::{MarkerEnvironment, Requirement};
use platform_tags::Tags;
use puffin_client::{File, PypiClient, SimpleJson};
use puffin_package::dist_info_name::DistInfoName;
use puffin_package::metadata::Metadata21;
use puffin_package::package_name::PackageName;
use wheel_filename::WheelFilename;
use crate::error::ResolveError;
use crate::pubgrub::iter_requirements;
use crate::pubgrub::package::PubGrubPackage;
use crate::pubgrub::version::{PubGrubVersion, MIN_VERSION};
use crate::resolution::{PinnedPackage, Resolution};
pub struct Resolver<'a> {
requirements: Vec<Requirement>,
markers: &'a MarkerEnvironment,
tags: &'a Tags,
client: &'a PypiClient,
reporter: Option<Box<dyn Reporter>>,
}
impl<'a> Resolver<'a> {
/// Initialize a new resolver.
pub fn new(markers: &'a MarkerEnvironment, tags: &'a Tags, client: &'a PypiClient) -> Self {
pub fn new(
requirements: Vec<Requirement>,
markers: &'a MarkerEnvironment,
tags: &'a Tags,
client: &'a PypiClient,
) -> Self {
Self {
requirements,
markers,
tags,
client,
reporter: None,
}
}
/// Set the [`Reporter`] to use for this resolver.
#[must_use]
pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
Self {
reporter: Some(Box::new(reporter)),
..self
}
}
/// Resolve a set of requirements into a set of pinned versions.
pub async fn resolve(
&self,
requirements: impl Iterator<Item = &Requirement>,
flags: ResolveFlags,
) -> Result<Resolution, ResolveError> {
pub async fn resolve(self) -> Result<Resolution, ResolveError> {
let client = Arc::new(self.client.clone());
let cache = Arc::new(SolverCache::default());
// A channel to fetch package metadata (e.g., given `flask`, fetch all versions) and version
// metadata (e.g., given `flask==1.0.0`, fetch the metadata for that version).
let (package_sink, package_stream) = futures::channel::mpsc::unbounded();
// Initialize the package stream.
let mut package_stream = package_stream
.map(|request: Request| match request {
Request::Package(requirement) => Either::Left(
self.client
// TODO(charlie): Remove this clone.
.simple(requirement.name.clone())
.map_ok(move |metadata| Response::Package(requirement, metadata)),
),
Request::Version(requirement, file) => Either::Right(
self.client
// TODO(charlie): Remove this clone.
.file(file.clone())
.map_ok(move |metadata| Response::Version(requirement, file, metadata)),
),
})
.buffer_unordered(32)
.ready_chunks(32);
// Push all the requirements into the package sink.
let mut in_flight: HashSet<PackageName> = HashSet::new();
for requirement in requirements {
debug!("Adding root dependency: {}", requirement);
package_sink.unbounded_send(Request::Package(requirement.clone()))?;
in_flight.insert(PackageName::normalize(&requirement.name));
}
if in_flight.is_empty() {
return Ok(Resolution::default());
}
// Resolve the requirements.
let mut resolution: BTreeMap<PackageName, PinnedPackage> = BTreeMap::new();
while let Some(chunk) = package_stream.next().await {
for result in chunk {
let result: Response = result?;
match result {
Response::Package(requirement, metadata) => {
// Pick a version that satisfies the requirement.
let Some(file) = metadata.files.iter().rev().find(|file| {
// We only support wheels for now.
let Ok(name) = WheelFilename::from_str(file.filename.as_str()) else {
return false;
};
let Ok(version) = Version::from_str(&name.version) else {
return false;
};
if !name.is_compatible(self.tags) {
return false;
}
requirement.is_satisfied_by(&version)
}) else {
return Err(ResolveError::NotFound(requirement));
};
package_sink.unbounded_send(Request::Version(requirement, file.clone()))?;
}
Response::Version(requirement, file, metadata) => {
debug!(
"Selecting: {}=={} ({})",
metadata.name, metadata.version, file.filename
);
let package = PinnedPackage {
metadata: metadata.clone(),
file,
};
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_resolve_progress(&package);
let (request_sink, request_stream) = futures::channel::mpsc::unbounded();
tokio::spawn({
let cache = cache.clone();
let client = client.clone();
async move {
let mut response_stream = request_stream
.map({
|request: Request| match request {
Request::Package(package_name) => Either::Left(
client
// TODO(charlie): Remove this clone.
.simple(package_name.clone())
.map_ok(move |metadata| {
Response::Package(package_name, metadata)
}),
),
Request::Version(file) => Either::Right(
client
// TODO(charlie): Remove this clone.
.file(file.clone())
.map_ok(move |metadata| Response::Version(file, metadata)),
),
}
})
.buffer_unordered(32)
.ready_chunks(32);
// Add to the resolved set.
let normalized_name = PackageName::normalize(&requirement.name);
in_flight.remove(&normalized_name);
resolution.insert(normalized_name, package);
if !flags.intersects(ResolveFlags::NO_DEPS) {
// Enqueue its dependencies.
for dependency in metadata.requires_dist {
if !dependency.evaluate_markers(
self.markers,
requirement.extras.as_ref().map_or(&[], Vec::as_slice),
) {
debug!("Ignoring {dependency} due to environment mismatch");
continue;
}
let normalized_name = PackageName::normalize(&dependency.name);
if resolution.contains_key(&normalized_name) {
continue;
}
if !in_flight.insert(normalized_name) {
continue;
}
debug!("Adding transitive dependency: {}", dependency);
package_sink.unbounded_send(Request::Package(dependency))?;
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_dependency_added();
}
while let Some(chunk) = response_stream.next().await {
for response in chunk {
match response? {
Response::Package(package_name, metadata) => {
trace!("Received package metadata for {}", package_name);
cache.packages.insert(package_name.clone(), metadata);
}
};
Response::Version(file, metadata) => {
trace!("Received file metadata for {}", file.filename);
cache.versions.insert(file.hashes.sha256.clone(), metadata);
}
}
}
}
}
if in_flight.is_empty() {
break;
Ok::<(), anyhow::Error>(())
}
});
// Push all the requirements into the package sink.
for requirement in &self.requirements {
debug!("Adding root dependency: {}", requirement);
let package_name = PackageName::normalize(&requirement.name);
request_sink.unbounded_send(Request::Package(package_name))?;
}
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_resolve_complete();
// Track the packages and versions that we've requested.
let mut requested_packages = HashSet::new();
let mut requested_versions = HashSet::new();
let mut pins = HashMap::new();
let selected_dependencies = self
.solve(
&cache,
&mut pins,
&mut requested_packages,
&mut requested_versions,
&request_sink,
)
.await?;
// Map to our own internal resolution type.
let mut resolution = BTreeMap::new();
for (package, version) in selected_dependencies {
let PubGrubPackage::Package(package_name, None) = package else {
continue;
};
let version = pep440_rs::Version::from(version);
let file = pins
.get(&package_name)
.and_then(|versions| versions.get(&version))
.unwrap()
.clone();
let pinned_package = PinnedPackage::new(package_name.clone(), version, file);
resolution.insert(package_name, pinned_package);
}
Ok(Resolution::new(resolution))
}
/// Run the `PubGrub` solver.
async fn solve(
&self,
cache: &SolverCache,
pins: &mut HashMap<PackageName, HashMap<pep440_rs::Version, File>>,
requested_packages: &mut HashSet<PackageName>,
requested_versions: &mut HashSet<String>,
request_sink: &futures::channel::mpsc::UnboundedSender<Request>,
) -> Result<SelectedDependencies<PubGrubPackage, PubGrubVersion>, ResolveError> {
let root = PubGrubPackage::Root;
// Start the solve.
let mut state = State::init(root.clone(), MIN_VERSION.clone());
let mut added_dependencies: HashMap<PubGrubPackage, HashSet<PubGrubVersion>> =
HashMap::default();
let mut next = root;
loop {
// Run unit propagation.
state.unit_propagation(next)?;
// Fetch the list of candidates.
let Some(potential_packages) = state.partial_solution.potential_packages() else {
return state.partial_solution.extract_solution().ok_or_else(|| {
PubGrubError::Failure(
"How did we end up with no package to choose but no solution?".into(),
)
.into()
});
};
// Choose a package version.
let potential_packages = potential_packages.collect::<Vec<_>>();
let decision = self
.choose_package_version(
potential_packages,
cache,
pins,
requested_versions,
request_sink,
)
.await?;
next = decision.0.clone();
// Pick the next compatible version.
let version = match decision.1 {
None => {
let term_intersection = state
.partial_solution
.term_intersection_for_package(&next)
.expect("a package was chosen but we don't have a term.");
let inc = Incompatibility::no_versions(next.clone(), term_intersection.clone());
state.add_incompatibility(inc);
continue;
}
Some(version) => version,
};
if added_dependencies
.entry(next.clone())
.or_default()
.insert(version.clone())
{
// Retrieve that package dependencies.
let package = &next;
let dependencies = match self
.get_dependencies(
package,
&version,
cache,
pins,
requested_packages,
request_sink,
)
.await?
{
Dependencies::Unknown => {
state.add_incompatibility(Incompatibility::unavailable_dependencies(
package.clone(),
version.clone(),
));
continue;
}
Dependencies::Known(constraints) => {
if constraints.contains_key(package) {
return Err(PubGrubError::SelfDependency {
package: package.clone(),
version: version.clone(),
}
.into());
}
if let Some((dependent, _)) =
constraints.iter().find(|(_, r)| r == &&Range::none())
{
return Err(PubGrubError::DependencyOnTheEmptySet {
package: package.clone(),
version: version.clone(),
dependent: dependent.clone(),
}
.into());
}
constraints
}
};
// Add that package and version if the dependencies are not problematic.
let dep_incompats = state.add_incompatibility_from_dependencies(
package.clone(),
version.clone(),
&dependencies,
);
if state.incompatibility_store[dep_incompats.clone()]
.iter()
.any(|incompat| state.is_terminal(incompat))
{
// For a dependency incompatibility to be terminal,
// it can only mean that root depend on not root?
return Err(PubGrubError::Failure(
"Root package depends on itself at a different version?".into(),
)
.into());
}
state.partial_solution.add_version(
package.clone(),
version,
dep_incompats,
&state.incompatibility_store,
);
} else {
// `dep_incompats` are already in `incompatibilities` so we know there are not satisfied
// terms and can add the decision directly.
state.partial_solution.add_decision(next.clone(), version);
}
}
}
/// Given a set of candidate packages, choose the next package (and version) to add to the
/// partial solution.
async fn choose_package_version<T: Borrow<PubGrubPackage>, U: Borrow<Range<PubGrubVersion>>>(
&self,
mut potential_packages: Vec<(T, U)>,
cache: &SolverCache,
pins: &mut HashMap<PackageName, HashMap<pep440_rs::Version, File>>,
in_flight: &mut HashSet<String>,
request_sink: &futures::channel::mpsc::UnboundedSender<Request>,
) -> Result<(T, Option<PubGrubVersion>), ResolveError> {
let mut selection = 0usize;
// Iterate over the potential packages, and fetch file metadata for any of them. These
// represent our current best guesses for the versions that we might want to select.
for (index, (package, range)) in potential_packages.iter().enumerate() {
let PubGrubPackage::Package(package_name, _) = package.borrow() else {
continue;
};
// If we don't have metadata for this package, we can't make an early decision.
let Some(entry) = cache.packages.get(package_name) else {
continue;
};
// Find a compatible version.
let simple_json = entry.value();
let Some(file) = simple_json.files.iter().rev().find(|file| {
let Ok(name) = WheelFilename::from_str(file.filename.as_str()) else {
return false;
};
let Ok(version) = pep440_rs::Version::from_str(&name.version) else {
return false;
};
if !name.is_compatible(self.tags) {
return false;
}
if !range
.borrow()
.contains(&PubGrubVersion::from(version.clone()))
{
return false;
};
true
}) else {
continue;
};
// Emit a request to fetch the metadata for this version.
if in_flight.insert(file.hashes.sha256.clone()) {
request_sink.unbounded_send(Request::Version(file.clone()))?;
}
selection = index;
}
// TODO(charlie): This is really ugly, but we need to return `T`, not `&T` (and yet
// we also need to iterate over `potential_packages` multiple times, so we can't
// use `into_iter()`.)
let (package, range) = potential_packages.remove(selection);
return match package.borrow() {
PubGrubPackage::Root => Ok((package, Some(MIN_VERSION.clone()))),
PubGrubPackage::Package(package_name, _) => {
// Wait for the metadata to be available.
// TODO(charlie): Ideally, we'd choose the first package for which metadata is
// available.
let entry = cache.packages.wait(package_name).await.unwrap();
let simple_json = entry.value();
// Find a compatible version.
let name_version_file = simple_json.files.iter().rev().find_map(|file| {
let Ok(name) = WheelFilename::from_str(file.filename.as_str()) else {
return None;
};
let Ok(version) = pep440_rs::Version::from_str(&name.version) else {
return None;
};
if !name.is_compatible(self.tags) {
return None;
}
if !range
.borrow()
.contains(&PubGrubVersion::from(version.clone()))
{
return None;
};
Some((package_name.clone(), version.clone(), file.clone()))
});
if let Some((name, version, file)) = name_version_file {
debug!("Selecting: {}=={} ({})", name, version, file.filename);
// We want to return a package pinned to a specific version; but we _also_ want to
// store the exact file that we selected to satisfy that version.
pins.entry(name)
.or_default()
.insert(version.clone(), file.clone());
// Emit a request to fetch the metadata for this version.
if cache.versions.get(&file.hashes.sha256).is_none() {
if in_flight.insert(file.hashes.sha256.clone()) {
request_sink.unbounded_send(Request::Version(file.clone()))?;
}
}
Ok((package, Some(PubGrubVersion::from(version))))
} else {
// We have metadata for the package, but no compatible version.
Ok((package, None))
}
}
};
}
/// Given a candidate package and version, return its dependencies.
async fn get_dependencies(
&self,
package: &PubGrubPackage,
version: &PubGrubVersion,
cache: &SolverCache,
pins: &mut HashMap<PackageName, HashMap<pep440_rs::Version, File>>,
requested_packages: &mut HashSet<PackageName>,
request_sink: &futures::channel::mpsc::UnboundedSender<Request>,
) -> Result<Dependencies, ResolveError> {
match package {
PubGrubPackage::Root => {
let mut constraints = DependencyConstraints::default();
for (package, version) in
iter_requirements(self.requirements.iter(), None, self.markers)
{
constraints.insert(package, version);
}
Ok(Dependencies::Known(constraints))
}
PubGrubPackage::Package(package_name, extra) => {
debug!("Fetching dependencies for {}[{:?}]", package_name, extra);
// Wait for the metadata to be available.
let versions = pins.get(package_name).unwrap();
let file = versions.get(version.into()).unwrap();
let entry = cache.versions.wait(&file.hashes.sha256).await.unwrap();
let metadata = entry.value();
let mut constraints = DependencyConstraints::default();
for (package, version) in
iter_requirements(metadata.requires_dist.iter(), extra.as_ref(), self.markers)
{
debug!("Adding transitive dependency: {package} {version}");
// Emit a request to fetch the metadata for this package.
if let PubGrubPackage::Package(package_name, None) = &package {
if requested_packages.insert(package_name.clone()) {
request_sink.unbounded_send(Request::Package(package_name.clone()))?;
}
}
// Add it to the constraints.
constraints.insert(package, version);
}
if let Some(extra) = extra {
if !metadata
.provides_extras
.iter()
.any(|provided_extra| DistInfoName::normalize(provided_extra) == *extra)
{
return Ok(Dependencies::Unknown);
}
constraints.insert(
PubGrubPackage::Package(package_name.clone(), None),
Range::exact(version.clone()),
);
}
Ok(Dependencies::Known(constraints))
}
}
}
}
#[derive(Debug)]
enum Request {
/// A request to fetch the metadata for a package.
Package(Requirement),
Package(PackageName),
/// A request to fetch the metadata for a specific version of a package.
Version(Requirement, File),
Version(File),
}
#[derive(Debug)]
enum Response {
/// The returned metadata for a package.
Package(Requirement, SimpleJson),
Package(PackageName, SimpleJson),
/// The returned metadata for a specific version of a package.
Version(Requirement, File, Metadata21),
Version(File, Metadata21),
}
pub trait Reporter: Send + Sync {
/// Callback to invoke when a dependency is added to the resolution.
fn on_dependency_added(&self);
struct SolverCache {
/// A map from package name to the metadata for that package.
packages: WaitMap<PackageName, SimpleJson>,
/// Callback to invoke when a dependency is resolved.
fn on_resolve_progress(&self, package: &PinnedPackage);
/// Callback to invoke when the resolution is complete.
fn on_resolve_complete(&self);
/// A map from wheel SHA to the metadata for that wheel.
versions: WaitMap<String, Metadata21>,
}
bitflags! {
#[derive(Debug, Copy, Clone, Default)]
pub struct ResolveFlags: u8 {
/// Don't install package dependencies.
const NO_DEPS = 1 << 0;
impl Default for SolverCache {
fn default() -> Self {
Self {
packages: WaitMap::new(),
versions: WaitMap::new(),
}
}
}
/// An enum used by [`DependencyProvider`] that holds information about package dependencies.
/// For each [Package] there is a [Range] of concrete versions it allows as a dependency.
#[derive(Clone)]
enum Dependencies {
/// Package dependencies are unavailable.
Unknown,
/// Container for all available package versions.
Known(DependencyConstraints<PubGrubPackage, PubGrubVersion>),
}

View file

@ -0,0 +1,171 @@
//! Given a set of selected packages, find a compatible set of wheels to install.
//!
//! This is similar to running `pip install` with the `--no-deps` flag.
use std::collections::BTreeMap;
use std::str::FromStr;
use anyhow::Result;
use futures::future::Either;
use futures::{StreamExt, TryFutureExt};
use tracing::debug;
use pep440_rs::Version;
use pep508_rs::Requirement;
use platform_tags::Tags;
use puffin_client::{File, PypiClient, SimpleJson};
use puffin_package::metadata::Metadata21;
use puffin_package::package_name::PackageName;
use wheel_filename::WheelFilename;
use crate::error::ResolveError;
use crate::resolution::{PinnedPackage, Resolution};
/// Finds a compatible wheel for each member of an already-resolved set of requirements.
///
/// Unlike the full resolver, this performs no dependency traversal — it only maps each
/// input requirement to a concrete wheel file (akin to `pip install --no-deps`).
pub struct WheelFinder<'a> {
/// Platform tags used to filter wheels to those compatible with the target interpreter.
tags: &'a Tags,
/// Client used to fetch package listings and per-file metadata from the index.
client: &'a PypiClient,
/// Optional observer notified as packages are pinned and when resolution completes.
reporter: Option<Box<dyn Reporter>>,
}
impl<'a> WheelFinder<'a> {
/// Initialize a new wheel finder.
pub fn new(tags: &'a Tags, client: &'a PypiClient) -> Self {
Self {
tags,
client,
reporter: None,
}
}
/// Set the [`Reporter`] to use for this resolution.
#[must_use]
pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
Self {
reporter: Some(Box::new(reporter)),
..self
}
}
/// Resolve a set of pinned packages into a set of wheels.
///
/// For each requirement, fetches the package's file listing, picks the newest wheel that
/// is tag-compatible and satisfies the version specifier, then fetches that wheel's
/// metadata to build a [`PinnedPackage`]. Dependencies are NOT followed.
///
/// # Errors
///
/// Returns [`ResolveError::NotFound`] if no compatible wheel exists for a requirement,
/// and propagates any client/channel failures.
pub async fn resolve(&self, requirements: &[Requirement]) -> Result<Resolution, ResolveError> {
if requirements.is_empty() {
return Ok(Resolution::default());
}
// A channel to fetch package metadata (e.g., given `flask`, fetch all versions) and version
// metadata (e.g., given `flask==1.0.0`, fetch the metadata for that version).
let (package_sink, package_stream) = futures::channel::mpsc::unbounded();
// Initialize the package stream.
let mut package_stream = package_stream
.map(|request: Request| match request {
Request::Package(requirement) => Either::Left(
self.client
// TODO(charlie): Remove this clone.
.simple(requirement.name.clone())
.map_ok(move |metadata| Response::Package(requirement, metadata)),
),
Request::Version(requirement, file) => Either::Right(
self.client
// TODO(charlie): Remove this clone.
.file(file.clone())
.map_ok(move |metadata| Response::Version(requirement, file, metadata)),
),
})
// Run up to 32 fetches concurrently, and drain completed responses in batches.
.buffer_unordered(32)
.ready_chunks(32);
// Push all the requirements into the package sink.
for requirement in requirements {
package_sink.unbounded_send(Request::Package(requirement.clone()))?;
}
// Resolve the requirements.
let mut resolution: BTreeMap<PackageName, PinnedPackage> = BTreeMap::new();
while let Some(chunk) = package_stream.next().await {
for result in chunk {
let result: Response = result?;
match result {
Response::Package(requirement, metadata) => {
// Pick a version that satisfies the requirement.
// Files are scanned in reverse, so later (presumably newer) entries win.
let Some(file) = metadata.files.iter().rev().find(|file| {
// We only support wheels for now.
let Ok(name) = WheelFilename::from_str(file.filename.as_str()) else {
return false;
};
let Ok(version) = Version::from_str(&name.version) else {
return false;
};
if !name.is_compatible(self.tags) {
return false;
}
requirement.is_satisfied_by(&version)
}) else {
return Err(ResolveError::NotFound(requirement));
};
// Enqueue a follow-up fetch for the chosen wheel's metadata.
package_sink.unbounded_send(Request::Version(requirement, file.clone()))?;
}
Response::Version(requirement, file, metadata) => {
debug!(
"Selecting: {}=={} ({})",
metadata.name, metadata.version, file.filename
);
let package = PinnedPackage::new(
PackageName::normalize(&metadata.name),
metadata.version,
file,
);
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_progress(&package);
}
// Add to the resolved set.
let normalized_name = PackageName::normalize(&requirement.name);
resolution.insert(normalized_name, package);
}
}
}
// Stop once every requirement has been pinned.
// NOTE(review): assumes the normalized names of `requirements` are distinct; a
// duplicate would keep this count short and stall the loop — confirm at call sites.
if resolution.len() == requirements.len() {
break;
}
}
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_complete();
}
Ok(Resolution::new(resolution))
}
}
/// A fetch task sent over the request channel in [`WheelFinder::resolve`].
#[derive(Debug)]
enum Request {
/// A request to fetch the metadata for a package.
Package(Requirement),
/// A request to fetch the metadata for a specific version of a package.
Version(Requirement, File),
}
/// A completed fetch, paired with the [`Request`] payload that produced it.
#[derive(Debug)]
enum Response {
/// The returned metadata for a package.
Package(Requirement, SimpleJson),
/// The returned metadata for a specific version of a package.
Version(Requirement, File, Metadata21),
}
/// Observer hooks for reporting [`WheelFinder`] progress (e.g., to drive a progress bar).
pub trait Reporter: Send + Sync {
/// Callback to invoke when a package is resolved to a wheel.
fn on_progress(&self, package: &PinnedPackage);
/// Callback to invoke when the resolution is complete.
fn on_complete(&self);
}

View file

@ -10,19 +10,15 @@ use pep508_rs::{MarkerEnvironment, Requirement, StringVersion};
use platform_host::{Arch, Os, Platform};
use platform_tags::Tags;
use puffin_client::PypiClientBuilder;
use puffin_resolver::{ResolveFlags, Resolver};
use puffin_resolver::Resolver;
#[tokio::test]
async fn black() -> Result<()> {
let client = PypiClientBuilder::default().build();
let resolver = Resolver::new(&MARKERS_311, &TAGS_311, &client);
let resolution = resolver
.resolve(
[Requirement::from_str("black<=23.9.1").unwrap()].iter(),
ResolveFlags::default(),
)
.await?;
let requirements = vec![Requirement::from_str("black<=23.9.1").unwrap()];
let resolver = Resolver::new(requirements, &MARKERS_311, &TAGS_311, &client);
let resolution = resolver.resolve().await?;
assert_eq!(
format!("{resolution}"),
@ -44,13 +40,9 @@ async fn black() -> Result<()> {
async fn black_colorama() -> Result<()> {
let client = PypiClientBuilder::default().build();
let resolver = Resolver::new(&MARKERS_311, &TAGS_311, &client);
let resolution = resolver
.resolve(
[Requirement::from_str("black[colorama]<=23.9.1").unwrap()].iter(),
ResolveFlags::default(),
)
.await?;
let requirements = vec![Requirement::from_str("black[colorama]<=23.9.1").unwrap()];
let resolver = Resolver::new(requirements, &MARKERS_311, &TAGS_311, &client);
let resolution = resolver.resolve().await?;
assert_eq!(
format!("{resolution}"),
@ -73,13 +65,9 @@ async fn black_colorama() -> Result<()> {
async fn black_python_310() -> Result<()> {
let client = PypiClientBuilder::default().build();
let resolver = Resolver::new(&MARKERS_310, &TAGS_310, &client);
let resolution = resolver
.resolve(
[Requirement::from_str("black<=23.9.1").unwrap()].iter(),
ResolveFlags::default(),
)
.await?;
let requirements = vec![Requirement::from_str("black<=23.9.1").unwrap()];
let resolver = Resolver::new(requirements, &MARKERS_310, &TAGS_310, &client);
let resolution = resolver.resolve().await?;
assert_eq!(
format!("{resolution}"),
@ -99,6 +87,36 @@ async fn black_python_310() -> Result<()> {
Ok(())
}
// Regression test: exercises backtracking, since the newest `htmldate` is rejected.
// NOTE(review): appears to resolve against the live package index via the default
// client — results depend on network availability and index state; confirm whether
// a cached/offline fixture is intended.
#[tokio::test]
async fn htmldate() -> Result<()> {
let client = PypiClientBuilder::default().build();
let requirements = vec![Requirement::from_str("htmldate<=1.5.0").unwrap()];
let resolver = Resolver::new(requirements, &MARKERS_311, &TAGS_311, &client);
let resolution = resolver.resolve().await?;
// Resolves to `htmldate==1.4.3` (rather than `htmldate==1.5.2`) because `htmldate==1.5.2` has
// a dependency on `lxml` versions that don't provide universal wheels.
assert_eq!(
format!("{resolution}"),
[
"charset-normalizer==3.3.0",
"dateparser==1.1.8",
"htmldate==1.4.3",
"lxml==4.9.3",
"python-dateutil==2.8.2",
"pytz==2023.3.post1",
"regex==2023.10.3",
"six==1.16.0",
"tzlocal==5.1",
"urllib3==2.0.6"
]
.join("\n")
);
Ok(())
}
static MARKERS_311: Lazy<MarkerEnvironment> = Lazy::new(|| {
MarkerEnvironment {
implementation_name: "cpython".to_string(),

View file

@ -16,12 +16,12 @@ TARGET=${1}
# Resolution with a cold cache.
###
hyperfine --runs 20 --warmup 3 --prepare "rm -f /tmp/requirements.txt" \
"./target/release/puffin-cli compile ${TARGET} --no-cache > /tmp/requirements.txt" \
"pip-compile ${TARGET} --rebuild --pip-args '--no-cache-dir' -o /tmp/requirements.txt"
"./target/release/puffin --no-cache compile ${TARGET} > /tmp/requirements.txt" \
"./target/release/main --no-cache compile ${TARGET} > /tmp/requirements.txt"
###
# Resolution with a warm cache.
###
hyperfine --runs 20 --warmup 3 --prepare "rm -f /tmp/requirements.txt" \
"./target/release/puffin-cli compile ${TARGET} > /tmp/requirements.txt" \
"pip-compile ${TARGET} -o /tmp/requirements.txt"
"./target/release/puffin compile ${TARGET} > /tmp/requirements.txt" \
"./target/release/main compile ${TARGET} > /tmp/requirements.txt"

170
vendor/pubgrub/CHANGELOG.md vendored Normal file
View file

@ -0,0 +1,170 @@
# Changelog
All notable changes to this project will be documented in this file.
## Unreleased [(diff)][unreleased-diff]
## [0.2.1] - 2021-06-30 - [(diff with 0.2.0)][0.2.0-diff]
This release is focused on performance improvements and code readability, without any change to the public API.
The code tends to be simpler around tricky parts of the algorithm such as conflict resolution.
Some data structures have been rewritten (with no unsafe) to lower memory usage.
Depending on scenarios, version 0.2.1 is 3 to 8 times faster than 0.2.0.
As an example, solving all elm package versions existing went from 580ms to 175ms on my laptop.
While solving a specific subset of packages from crates.io went from 2.5s to 320ms on my laptop.
Below are listed all the important changes in the internal parts of the API.
#### Added
- New `SmallVec` data structure (with no unsafe) using fixed size arrays for up to 2 entries.
- New `SmallMap` data structure (with no unsafe) using fixed size arrays for up to 2 entries.
- New `Arena` data structure (with no unsafe) backed by a `Vec` and indexed with `Id<T>` where `T` is phantom data.
#### Changed
- Updated the `large_case` benchmark to run with both u16 and string package identifiers in registries.
- Use the new `Arena` for the incompatibility store, and use its `Id<T>` identifiers to reference incompatibilities instead of full owned copies in the `incompatibilities` field of the solver `State`.
- Save satisfier indices of each package involved in an incompatibility when looking for its satisfier. This speeds up the search for the previous satisfier.
- Early unit propagation loop restart at the first conflict found instead of continuing evaluation for the current package.
- Index incompatibilities by package in a hash map instead of using a vec.
- Keep track of already contradicted incompatibilities in a `Set` until the next backtrack to speed up unit propagation.
- Unify `history` and `memory` in `partial_solution` under a unique hash map indexed by packages. This should speed up access to relevant terms in conflict resolution.
## [0.2.0] - 2020-11-19 - [(diff with 0.1.0)][0.1.0-diff]
This release brings many important improvements to PubGrub.
The gist of it is:
- A bug in the algorithm's implementation was [fixed](https://github.com/pubgrub-rs/pubgrub/pull/23).
- The solver is now implemented in a `resolve` function taking as argument
an implementer of the `DependencyProvider` trait,
which has more control over the decision making process.
- End-to-end property testing of large synthetic registries was added.
- More than 10x performance improvement.
### Changes affecting the public API
#### Added
- Links to code items in the code documentation.
- New `"serde"` feature that allows serializing some library types, useful for making simple reproducible bug reports.
- New variants for `error::PubGrubError` which are `DependencyOnTheEmptySet`,
`SelfDependency`, `ErrorChoosingPackageVersion` and `ErrorInShouldCancel`.
- New `type_alias::Map` defined as `rustc_hash::FxHashMap`.
- New `type_alias::SelectedDependencies<P, V>` defined as `Map<P, V>`.
- The types `Dependencies` and `DependencyConstraints` were introduced to clarify intent.
- New function `choose_package_with_fewest_versions` to help implement
the `choose_package_version` method of a `DependencyProvider`.
- Implement `FromStr` for `SemanticVersion`.
- Add the `VersionParseError` type for parsing of semantic versions.
#### Changed
- The `Solver` trait was replaced by a `DependencyProvider` trait
which now must implement a `choose_package_version` method
instead of `list_available_versions`.
So it now has the ability to choose a package in addition to a version.
The `DependencyProvider` also has a new optional method `should_cancel`
that may be used to stop the solver if needed.
- The `choose_package_version` and `get_dependencies` methods of the
`DependencyProvider` trait now take an immutable reference to `self`.
Interior mutability can be used by implementor if mutability is needed.
- The `Solver.run` method was thus replaced by a free function `solver::resolve`
taking a dependency provider as first argument.
- The `OfflineSolver` is thus replaced by an `OfflineDependencyProvider`.
- `SemanticVersion` now takes `u32` instead of `usize` for its 3 parts.
- `NumberVersion` now uses `u32` instead of `usize`.
#### Removed
- `ErrorRetrievingVersions` variant of `error::PubGrubError`.
### Changes in the internal parts of the API
#### Added
- `benches/large_case.rs` enables benchmarking of serialized registries of packages.
- `examples/caching_dependency_provider.rs` an example dependency provider caching dependencies.
- `PackageTerm<P, V> = (P, Term<V>)` new type alias for readability.
- `Memory.term_intersection_for_package(&mut self, package: &P) -> Option<&Term<V>>`
- New types were introduced for conflict resolution in `internal::partial_solution`
to clarify the intent and return values of some functions.
Those types are `DatedAssignment` and `SatisfierAndPreviousHistory`.
- `PartialSolution.term_intersection_for_package` calling the same function
from its `memory`.
- New property tests for ranges: `negate_contains_opposite`, `intesection_contains_both`
and `union_contains_either`.
- A large synthetic test case was added in `test-examples/`.
- A new test example `double_choices` was added
for the detection of a bug (fixed) in the implementation.
- Property testing of big synthetic datasets was added in `tests/proptest.rs`.
- Comparison of PubGrub solver and a SAT solver
was added with `tests/sat_dependency_provider.rs`.
- Other regression and unit tests were added to `tests/tests.rs`.
#### Changed
- CI workflow was improved (`./github/workflows/`), including a check for
[Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) and
[Clippy](https://github.com/rust-lang/rust-clippy) for source code linting.
- Using SPDX license identifiers instead of MPL-2.0 classic file headers.
- `State.incompatibilities` is now wrapped inside a `Rc`.
- `DecisionLevel(u32)` is used in place of `usize` for partial solution decision levels.
- `State.conflict_resolution` now also returns the almost satisfied package
to avoid an unnecessary call to `self.partial_solution.relation(...)` after conflict resolution.
- `Kind::NoVersion` renamed to `Kind::NoVersions` and all other usage of `noversion`
has been changed to `no_versions`.
- Variants of the `incompatibility::Relation` enum have changed.
- Incompatibility now uses a deterministic hasher to store packages in its hash map.
- `incompatibility.relation(...)` now takes a function as argument to avoid computations
of unnecessary terms intersections.
- `Memory` now uses a deterministic hasher instead of the default one.
- `memory::PackageAssignments` is now an enum instead of a struct.
- Derivations in a `PackageAssignments` keep a precomputed intersection of derivation terms.
- `potential_packages` method now returns a `Range`
instead of a `Term` for the versions constraint of each package.
- `PartialSolution.relation` now takes `&mut self` instead of `&self`
to be able to store computation of terms intersection.
- `Term.accept_version` was renamed `Term.contains`.
- The `satisfied_by` and `contradicted_by` methods of a `Term`
now directly takes a reference to the intersection of other terms.
Same for `relation_with`.
#### Removed
- `term` field of an `Assignment::Derivation` variant.
- `Memory.all_terms` method was removed.
- `Memory.remove_decision` method was removed in favor of a check before using `Memory.add_decision`.
- `PartialSolution` methods `pick_package` and `pick_version` have been removed
since control was given back to the dependency provider to choose a package version.
- `PartialSolution` methods `remove_last_decision` and `satisfies_any_of` were removed
in favor of a preventive check before calling `add_decision`.
- `Term.is_negative`.
#### Fixed
- Prior cause computation (`incompatibility::prior_cause`) now uses the intersection of package terms
instead of their union, which was an implementation error.
## [0.1.0] - 2020-10-01
### Added
- `README.md` as the home page of this repository.
- `LICENSE`, code is provided under the MPL 2.0 license.
- `Cargo.toml` configuration of this Rust project.
- `src/` containing all the source code for this first implementation of PubGrub in Rust.
- `tests/` containing test end-to-end examples.
- `examples/` other examples, not in the form of tests.
- `.gitignore` configured for a Rust project.
- `.github/workflows/` CI to automatically build, test and document on push and pull requests.
[0.2.1]: https://github.com/pubgrub-rs/pubgrub/releases/tag/v0.2.1
[0.2.0]: https://github.com/pubgrub-rs/pubgrub/releases/tag/v0.2.0
[0.1.0]: https://github.com/pubgrub-rs/pubgrub/releases/tag/v0.1.0
[unreleased-diff]: https://github.com/pubgrub-rs/pubgrub/compare/release...dev
[0.2.0-diff]: https://github.com/pubgrub-rs/pubgrub/compare/v0.2.0...v0.2.1
[0.1.0-diff]: https://github.com/pubgrub-rs/pubgrub/compare/v0.1.0...v0.2.0

36
vendor/pubgrub/Cargo.toml vendored Normal file
View file

@ -0,0 +1,36 @@
# SPDX-License-Identifier: MPL-2.0
[package]
name = "pubgrub"
version = "0.2.1"
authors = [
"Matthieu Pizenberg <matthieu.pizenberg@gmail.com>",
"Alex Tokarev <aleksator@gmail.com>",
"Jacob Finkelman <Eh2406@wayne.edu>",
]
edition = "2018"
description = "PubGrub version solving algorithm"
readme = "README.md"
repository = "https://github.com/pubgrub-rs/pubgrub"
license = "MPL-2.0"
keywords = ["dependency", "pubgrub", "semver", "solver", "version"]
categories = ["algorithms"]
include = ["Cargo.toml", "LICENSE", "README.md", "src/**", "tests/**", "examples/**", "benches/**"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
thiserror = "1.0"
rustc-hash = "1.1.0"
serde = { version = "1.0", features = ["derive"], optional = true }
[dev-dependencies]
proptest = "0.10.1"
ron = "0.6"
varisat = "0.2.2"
criterion = "0.3"
[[bench]]
name = "large_case"
harness = false
required-features = ["serde"]

373
vendor/pubgrub/LICENSE vendored Normal file
View file

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

83
vendor/pubgrub/README.md vendored Normal file
View file

@ -0,0 +1,83 @@
# PubGrub version solving algorithm
![license](https://img.shields.io/crates/l/pubgrub.svg)
[![crates.io](https://img.shields.io/crates/v/pubgrub.svg?logo=rust)][crates]
[![docs.rs](https://img.shields.io/badge/docs.rs-pubgrub-yellow)][docs]
[![guide](https://img.shields.io/badge/guide-pubgrub-pink?logo=read-the-docs)][guide]
Version solving consists in efficiently finding a set of packages and versions
that satisfy all the constraints of a given project's dependencies.
In addition, when that is not possible,
PubGrub tries to provide a very human-readable and clear
explanation as to why that failed.
The [introductory blog post about PubGrub][medium-pubgrub] presents
one such example of failure explanation:
```txt
Because dropdown >=2.0.0 depends on icons >=2.0.0 and
root depends on icons <2.0.0, dropdown >=2.0.0 is forbidden.
And because menu >=1.1.0 depends on dropdown >=2.0.0,
menu >=1.1.0 is forbidden.
And because menu <1.1.0 depends on dropdown >=1.0.0 <2.0.0
which depends on intl <4.0.0, every version of menu
requires intl <4.0.0.
So, because root depends on both menu >=1.0.0 and intl >=5.0.0,
version solving failed.
```
This pubgrub crate provides a Rust implementation of PubGrub.
It is generic and works for any type of dependency system
as long as packages (P) and versions (V) implement
the provided `Package` and `Version` traits.
## Using the pubgrub crate
A [guide][guide] with both high-level explanations and
in-depth algorithm details is available online.
The [API documentation is available on docs.rs][docs].
A version of the [API docs for the unreleased functionality][docs-dev] from `dev` branch is also
accessible for convenience.
## Contributing
Discussion and development happens here on GitHub and on our
[Zulip stream](https://rust-lang.zulipchat.com/#narrow/stream/260232-t-cargo.2FPubGrub).
Please join in!
Remember to always be considerate of others,
who may have different native languages, cultures and experiences.
We want everyone to feel welcomed,
let us know with a private message on Zulip if you don't feel that way.
## PubGrub
PubGrub is a version solving algorithm,
written in 2018 by Natalie Weizenbaum
for the Dart package manager.
It is supposed to be very fast and to explain errors
more clearly than the alternatives.
An introductory blog post was
[published on Medium][medium-pubgrub] by its author.
The detailed explanation of the algorithm is
[provided on GitHub][github-pubgrub],
and complemented by the ["Internals" section of our guide][guide-internals].
The foundation of the algorithm is based on ASP (Answer Set Programming),
and a book called
"[Answer Set Solving in Practice][potassco-book]"
by Martin Gebser, Roland Kaminski, Benjamin Kaufmann and Torsten Schaub.
[crates]: https://crates.io/crates/pubgrub
[guide]: https://pubgrub-rs-guide.netlify.app/
[guide-internals]: https://pubgrub-rs-guide.netlify.app/internals/intro.html
[docs]: https://docs.rs/pubgrub
[docs-dev]: https://pubgrub-rs.github.io/pubgrub/pubgrub/
[medium-pubgrub]: https://medium.com/@nex3/pubgrub-2fb6470504f
[github-pubgrub]: https://github.com/dart-lang/pub/blob/master/doc/solver.md
[potassco-book]: https://potassco.org/book/

51
vendor/pubgrub/benches/large_case.rs vendored Normal file
View file

@ -0,0 +1,51 @@
// SPDX-License-Identifier: MPL-2.0
use std::time::Duration;
extern crate criterion;
use self::criterion::*;
use pubgrub::package::Package;
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::{NumberVersion, SemanticVersion, Version};
use serde::de::Deserialize;
use std::hash::Hash;
/// Benchmarks resolution of every `(package, version)` pair found in a
/// RON-serialized [`OfflineDependencyProvider`].
///
/// `case` is the RON source text; deserialization failures panic, which is
/// acceptable inside a benchmark harness.
fn bench<'a, P: Package + Deserialize<'a>, V: Version + Hash + Deserialize<'a>>(
    b: &mut Bencher,
    case: &'a str,
) {
    // `case` is already a `&str`; the extra `&` in the original was a
    // needless borrow (clippy::needless_borrow).
    let dependency_provider: OfflineDependencyProvider<P, V> = ron::de::from_str(case).unwrap();
    b.iter(|| {
        for p in dependency_provider.packages() {
            for n in dependency_provider.versions(p).unwrap() {
                // The resolution result is irrelevant; we only measure solver time.
                let _ = resolve(&dependency_provider, p.clone(), n.clone());
            }
        }
    });
}
/// Registers one benchmark per RON fixture found in `test-examples/`,
/// dispatching on the file-name suffix to select package/version types.
fn bench_nested(c: &mut Criterion) {
    let mut group = c.benchmark_group("large_cases");
    group.measurement_time(Duration::from_secs(20));
    for entry in std::fs::read_dir("test-examples").unwrap() {
        let path = entry.unwrap().path();
        let file_name = path.file_name().unwrap().to_string_lossy();
        let contents = std::fs::read_to_string(&path).unwrap();
        if file_name.ends_with("u16_NumberVersion.ron") {
            group.bench_function(file_name, |b| bench::<u16, NumberVersion>(b, &contents));
        } else if file_name.ends_with("str_SemanticVersion.ron") {
            group.bench_function(file_name, |b| bench::<&str, SemanticVersion>(b, &contents));
        }
    }
    group.finish();
}

criterion_group!(benches, bench_nested);
criterion_main!(benches);

View file

@ -0,0 +1,65 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::SemanticVersion;
// https://github.com/dart-lang/pub/blob/master/doc/solver.md#branching-error-reporting
fn main() {
    let mut provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();

    // root 1.0.0 depends on foo ^1.0.0
    provider.add_dependencies(
        "root",
        (1, 0, 0),
        vec![("foo", Range::between((1, 0, 0), (2, 0, 0)))],
    );

    // foo 1.0.0 depends on a ^1.0.0 and b ^1.0.0
    provider.add_dependencies(
        "foo",
        (1, 0, 0),
        vec![
            ("a", Range::between((1, 0, 0), (2, 0, 0))),
            ("b", Range::between((1, 0, 0), (2, 0, 0))),
        ],
    );

    // foo 1.1.0 depends on x ^1.0.0 and y ^1.0.0
    provider.add_dependencies(
        "foo",
        (1, 1, 0),
        vec![
            ("x", Range::between((1, 0, 0), (2, 0, 0))),
            ("y", Range::between((1, 0, 0), (2, 0, 0))),
        ],
    );

    // a 1.0.0 depends on b ^2.0.0
    provider.add_dependencies(
        "a",
        (1, 0, 0),
        vec![("b", Range::between((2, 0, 0), (3, 0, 0)))],
    );

    // b 1.0.0 and 2.0.0 have no dependencies.
    for &version in &[(1, 0, 0), (2, 0, 0)] {
        provider.add_dependencies("b", version, vec![]);
    }

    // x 1.0.0 depends on y ^2.0.0.
    provider.add_dependencies(
        "x",
        (1, 0, 0),
        vec![("y", Range::between((2, 0, 0), (3, 0, 0)))],
    );

    // y 1.0.0 and 2.0.0 have no dependencies.
    for &version in &[(1, 0, 0), (2, 0, 0)] {
        provider.add_dependencies("y", version, vec![]);
    }

    // Run the solver; on conflict print the human-readable derivation and
    // exit non-zero, mirroring what a user-facing tool would do.
    match resolve(&provider, "root", (1, 0, 0)) {
        Ok(solution) => println!("{:?}", solution),
        Err(PubGrubError::NoSolution(mut derivation_tree)) => {
            derivation_tree.collapse_no_versions();
            eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
            std::process::exit(1);
        }
        Err(other) => panic!("{:?}", other),
    };
}

View file

@ -0,0 +1,78 @@
// SPDX-License-Identifier: MPL-2.0
use std::cell::RefCell;
use std::error::Error;
use pubgrub::package::Package;
use pubgrub::range::Range;
use pubgrub::solver::{resolve, Dependencies, DependencyProvider, OfflineDependencyProvider};
use pubgrub::version::{NumberVersion, Version};
// An example implementing caching dependency provider that will
// store queried dependencies in memory and check them before querying more from remote.
/// Dependency provider that consults an in-memory cache first and only falls
/// back to `remote_dependencies` on a cache miss.
struct CachingDependencyProvider<P: Package, V: Version, DP: DependencyProvider<P, V>> {
    // Authoritative source of dependency data (presumably slower to query).
    remote_dependencies: DP,
    // Local cache; `RefCell` provides interior mutability because the
    // `DependencyProvider` trait methods take `&self`.
    cached_dependencies: RefCell<OfflineDependencyProvider<P, V>>,
}
impl<P: Package, V: Version, DP: DependencyProvider<P, V>> CachingDependencyProvider<P, V, DP> {
    /// Wraps `remote_dependencies_provider` with an initially empty cache.
    pub fn new(remote_dependencies_provider: DP) -> Self {
        Self {
            cached_dependencies: RefCell::new(OfflineDependencyProvider::new()),
            remote_dependencies: remote_dependencies_provider,
        }
    }
}
impl<P: Package, V: Version, DP: DependencyProvider<P, V>> DependencyProvider<P, V>
    for CachingDependencyProvider<P, V, DP>
{
    // Package/version selection is delegated untouched to the remote provider.
    fn choose_package_version<T: std::borrow::Borrow<P>, U: std::borrow::Borrow<Range<V>>>(
        &self,
        packages: impl Iterator<Item = (T, U)>,
    ) -> Result<(T, Option<V>), Box<dyn Error>> {
        self.remote_dependencies.choose_package_version(packages)
    }

    // Caches dependencies if they were already queried
    fn get_dependencies(
        &self,
        package: &P,
        version: &V,
    ) -> Result<Dependencies<P, V>, Box<dyn Error>> {
        let mut cache = self.cached_dependencies.borrow_mut();
        match cache.get_dependencies(package, version) {
            // Cache miss: ask the remote provider and, when it has a known
            // answer, record it so the next query is served locally.
            Ok(Dependencies::Unknown) => {
                let dependencies = self.remote_dependencies.get_dependencies(package, version);
                match dependencies {
                    Ok(Dependencies::Known(dependencies)) => {
                        cache.add_dependencies(
                            package.clone(),
                            version.clone(),
                            dependencies.clone().into_iter(),
                        );
                        Ok(Dependencies::Known(dependencies))
                    }
                    // Remote does not know either; nothing worth caching.
                    Ok(Dependencies::Unknown) => Ok(Dependencies::Unknown),
                    error @ Err(_) => error,
                }
            }
            // Cache hit: return the stored dependencies unchanged.
            dependencies @ Ok(_) => dependencies,
            error @ Err(_) => error,
        }
    }
}
fn main() {
    // Stand-in for a remote registry: an offline provider holding a single
    // dependency-free `root` package.
    let mut remote = OfflineDependencyProvider::<&str, NumberVersion>::new();
    remote.add_dependencies("root", 1, Vec::new());

    // Wrap it in the caching layer and resolve through the cache.
    let caching_provider = CachingDependencyProvider::new(remote);
    let solution = resolve(&caching_provider, "root", 1);
    println!("Solution: {:?}", solution);
}

View file

@ -0,0 +1,24 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::range::Range;
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::NumberVersion;
// `root` depends on `menu` and `icons`
// `menu` depends on `dropdown`
// `dropdown` depends on `icons`
// `icons` has no dependency
fn main() {
    let mut provider = OfflineDependencyProvider::<&str, NumberVersion>::new();

    // Dependency graph: root -> {menu, icons}; menu -> dropdown -> icons.
    provider.add_dependencies(
        "root",
        1,
        vec![("menu", Range::any()), ("icons", Range::any())],
    );
    provider.add_dependencies("menu", 1, vec![("dropdown", Range::any())]);
    provider.add_dependencies("dropdown", 1, vec![("icons", Range::any())]);
    provider.add_dependencies("icons", 1, vec![]);

    // Run the algorithm.
    let solution = resolve(&provider, "root", 1);
    println!("Solution: {:?}", solution);
}

View file

@ -0,0 +1,81 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::SemanticVersion;
// `root` depends on `menu`, `icons 1.0.0` and `intl 5.0.0`
// `menu 1.0.0` depends on `dropdown < 2.0.0`
// `menu >= 1.1.0` depends on `dropdown >= 2.0.0`
// `dropdown 1.8.0` depends on `intl 3.0.0`
// `dropdown >= 2.0.0` depends on `icons 2.0.0`
// `icons` has no dependency
// `intl` has no dependency
fn main() {
    let mut provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();

    // Direct dependencies: menu (any version), icons 1.0.0 and intl 5.0.0.
    provider.add_dependencies(
        "root",
        (1, 0, 0),
        vec![
            ("menu", Range::any()),
            ("icons", Range::exact((1, 0, 0))),
            ("intl", Range::exact((5, 0, 0))),
        ],
    );

    // menu 1.0.0 works with an old dropdown; every later menu release
    // requires dropdown >= 2.0.0.
    provider.add_dependencies(
        "menu",
        (1, 0, 0),
        vec![("dropdown", Range::strictly_lower_than((2, 0, 0)))],
    );
    for &minor in &[1, 2, 3, 4, 5] {
        provider.add_dependencies(
            "menu",
            (1, minor, 0),
            vec![("dropdown", Range::higher_than((2, 0, 0)))],
        );
    }

    // dropdown 1.8.0 needs intl 3.0.0; dropdown >= 2.0.0 needs icons 2.0.0.
    provider.add_dependencies(
        "dropdown",
        (1, 8, 0),
        vec![("intl", Range::exact((3, 0, 0)))],
    );
    for &minor in &[0, 1, 2, 3] {
        provider.add_dependencies(
            "dropdown",
            (2, minor, 0),
            vec![("icons", Range::exact((2, 0, 0)))],
        );
    }

    // icons and intl are leaves: no dependencies at any version.
    provider.add_dependencies("icons", (1, 0, 0), vec![]);
    provider.add_dependencies("icons", (2, 0, 0), vec![]);
    for &major in &[3, 4, 5] {
        provider.add_dependencies("intl", (major, 0, 0), vec![]);
    }

    // Run the solver; on conflict print the human-readable derivation tree.
    match resolve(&provider, "root", (1, 0, 0)) {
        Ok(solution) => println!("{:?}", solution),
        Err(PubGrubError::NoSolution(mut derivation_tree)) => {
            derivation_tree.collapse_no_versions();
            eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
        }
        Err(other) => panic!("{:?}", other),
    };
}

View file

@ -0,0 +1,72 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::SemanticVersion;
// `root` depends on `menu` and `icons 1.0.0`
// `menu 1.0.0` depends on `dropdown < 2.0.0`
// `menu >= 1.1.0` depends on `dropdown >= 2.0.0`
// `dropdown 1.8.0` has no dependency
// `dropdown >= 2.0.0` depends on `icons 2.0.0`
// `icons` has no dependency
fn main() {
    let mut provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();

    // Direct dependencies: menu (any version) and icons 1.0.0.
    provider.add_dependencies(
        "root",
        (1, 0, 0),
        vec![("menu", Range::any()), ("icons", Range::exact((1, 0, 0)))],
    );

    // menu 1.0.0 works with an old dropdown; every later menu release
    // requires dropdown >= 2.0.0.
    provider.add_dependencies(
        "menu",
        (1, 0, 0),
        vec![("dropdown", Range::strictly_lower_than((2, 0, 0)))],
    );
    for &minor in &[1, 2, 3, 4, 5] {
        provider.add_dependencies(
            "menu",
            (1, minor, 0),
            vec![("dropdown", Range::higher_than((2, 0, 0)))],
        );
    }

    // dropdown 1.8.0 is dependency-free; dropdown >= 2.0.0 needs icons 2.0.0.
    provider.add_dependencies("dropdown", (1, 8, 0), vec![]);
    for &minor in &[0, 1, 2, 3] {
        provider.add_dependencies(
            "dropdown",
            (2, minor, 0),
            vec![("icons", Range::exact((2, 0, 0)))],
        );
    }

    // icons is a leaf at both versions.
    provider.add_dependencies("icons", (1, 0, 0), vec![]);
    provider.add_dependencies("icons", (2, 0, 0), vec![]);

    // Run the solver; on conflict print the human-readable derivation tree.
    match resolve(&provider, "root", (1, 0, 0)) {
        Ok(solution) => println!("{:?}", solution),
        Err(PubGrubError::NoSolution(mut derivation_tree)) => {
            derivation_tree.collapse_no_versions();
            eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
        }
        Err(other) => panic!("{:?}", other),
    };
}

View file

@ -0,0 +1,47 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::SemanticVersion;
// Reproduces the "linear error reporting" scenario from the PubGrub spec:
// https://github.com/dart-lang/pub/blob/master/doc/solver.md#linear-error-reporting
fn main() {
    let mut provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();
    // root 1.0.0 depends on foo ^1.0.0 and baz ^1.0.0
    provider.add_dependencies(
        "root",
        (1, 0, 0),
        vec![
            ("foo", Range::between((1, 0, 0), (2, 0, 0))),
            ("baz", Range::between((1, 0, 0), (2, 0, 0))),
        ],
    );
    // foo 1.0.0 depends on bar ^2.0.0
    provider.add_dependencies(
        "foo",
        (1, 0, 0),
        vec![("bar", Range::between((2, 0, 0), (3, 0, 0)))],
    );
    // bar 2.0.0 depends on baz ^3.0.0
    provider.add_dependencies(
        "bar",
        (2, 0, 0),
        vec![("baz", Range::between((3, 0, 0), (4, 0, 0)))],
    );
    // baz 1.0.0 and 3.0.0 have no dependencies
    provider.add_dependencies("baz", (1, 0, 0), vec![]);
    provider.add_dependencies("baz", (3, 0, 0), vec![]);
    // Run the algorithm: print the solution, or report the conflict and
    // exit non-zero on failure.
    match resolve(&provider, "root", (1, 0, 0)) {
        Ok(solution) => println!("{:?}", solution),
        Err(PubGrubError::NoSolution(mut derivation_tree)) => {
            derivation_tree.collapse_no_versions();
            eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
            std::process::exit(1);
        }
        Err(err) => panic!("{:?}", err),
    };
}

76
vendor/pubgrub/src/error.rs vendored Normal file
View file

@ -0,0 +1,76 @@
// SPDX-License-Identifier: MPL-2.0
//! Handling pubgrub errors.
use thiserror::Error;
use crate::package::Package;
use crate::report::DerivationTree;
use crate::version::Version;
/// Errors that may occur while solving dependencies.
#[derive(Error, Debug)]
pub enum PubGrubError<P: Package, V: Version> {
    /// There is no solution for this set of dependencies.
    /// Contains the [DerivationTree] explaining why, which a
    /// [Reporter](crate::report::Reporter) can turn into a user-facing message.
    #[error("No solution")]
    NoSolution(DerivationTree<P, V>),

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method
    /// [get_dependencies](crate::solver::DependencyProvider::get_dependencies).
    #[error("Retrieving dependencies of {package} {version} failed")]
    ErrorRetrievingDependencies {
        /// Package whose dependencies we want.
        package: P,
        /// Version of the package for which we want the dependencies.
        version: V,
        /// Error raised by the implementer of
        /// [DependencyProvider](crate::solver::DependencyProvider).
        source: Box<dyn std::error::Error + Send + Sync>,
    },

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned a dependency on an empty range.
    /// This technically means that the package can not be selected,
    /// but is clearly some kind of mistake.
    #[error("Package {dependent} required by {package} {version} depends on the empty set")]
    DependencyOnTheEmptySet {
        /// Package whose dependencies we want.
        package: P,
        /// Version of the package for which we want the dependencies.
        version: V,
        /// The dependent package that requires us to pick from the empty set.
        dependent: P,
    },

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned a dependency on the requested package.
    /// This technically means that the package directly depends on itself,
    /// and is clearly some kind of mistake.
    #[error("{package} {version} depends on itself")]
    SelfDependency {
        /// Package whose dependencies we want.
        package: P,
        /// Version of the package for which we want the dependencies.
        version: V,
    },

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method
    /// [choose_package_version](crate::solver::DependencyProvider::choose_package_version).
    #[error("Decision making failed")]
    ErrorChoosingPackageVersion(Box<dyn std::error::Error + Send + Sync>),

    /// Error arising when the implementer of [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method [should_cancel](crate::solver::DependencyProvider::should_cancel).
    #[error("We should cancel")]
    ErrorInShouldCancel(Box<dyn std::error::Error + Send + Sync>),

    /// Something unexpected happened.
    #[error("{0}")]
    Failure(String),
}

122
vendor/pubgrub/src/internal/arena.rs vendored Normal file
View file

@ -0,0 +1,122 @@
use std::{
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
ops::{Index, Range},
};
/// The index of a value allocated in an arena that holds `T`s.
///
/// The Clone, Copy and other traits are defined manually because
/// deriving them adds some additional constraints on the `T` generic type
/// that we actually don't need since it is phantom.
///
/// <https://github.com/rust-lang/rust/issues/26925>
pub struct Id<T> {
    raw: u32,
    // `fn() -> T` keeps `Id` covariant in `T` without owning a `T`.
    _ty: PhantomData<fn() -> T>,
}

impl<T> Clone for Id<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for Id<T> {}

impl<T> PartialEq for Id<T> {
    fn eq(&self, other: &Id<T>) -> bool {
        self.raw == other.raw
    }
}

impl<T> Eq for Id<T> {}

impl<T> Hash for Id<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.raw.hash(state)
    }
}

impl<T> fmt::Debug for Id<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Strip any module path so `Id::<a::b::Foo>(3)` prints as `Id::<Foo>(3)`.
        let mut type_name = std::any::type_name::<T>();
        if let Some(id) = type_name.rfind(':') {
            type_name = &type_name[id + 1..]
        }
        write!(f, "Id::<{}>({})", type_name, self.raw)
    }
}

impl<T> Id<T> {
    /// Return the raw index backing this id.
    pub fn into_raw(self) -> usize {
        self.raw as usize
    }
    fn from(n: u32) -> Self {
        Self {
            // Fixed: `n` is already a `u32`; the previous `n as u32` cast was redundant.
            raw: n,
            _ty: PhantomData,
        }
    }
    /// Iterate over every id contained in a contiguous id range.
    pub fn range_to_iter(range: Range<Self>) -> impl Iterator<Item = Self> {
        let start = range.start.raw;
        let end = range.end.raw;
        (start..end).map(Self::from)
    }
}

/// Yet another index-based arena.
///
/// An arena is a kind of simple grow-only allocator, backed by a `Vec`
/// where all items have the same lifetime, making it easier
/// to have references between those items.
/// They are all dropped at once when the arena is dropped.
#[derive(Clone, PartialEq, Eq)]
pub struct Arena<T> {
    data: Vec<T>,
}

impl<T: fmt::Debug> fmt::Debug for Arena<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Arena")
            .field("len", &self.data.len())
            .field("data", &self.data)
            .finish()
    }
}

impl<T> Arena<T> {
    /// Create an empty arena.
    pub fn new() -> Arena<T> {
        Arena { data: Vec::new() }
    }

    /// Store `value` in the arena and return the id it was assigned.
    pub fn alloc(&mut self, value: T) -> Id<T> {
        let raw = self.data.len();
        self.data.push(value);
        Id::from(raw as u32)
    }

    /// Store every item yielded by `values` and return the id range covering them.
    pub fn alloc_iter<I: Iterator<Item = T>>(&mut self, values: I) -> Range<Id<T>> {
        let start = Id::from(self.data.len() as u32);
        // `extend` reserves capacity up front from the iterator's size hint,
        // instead of pushing (and potentially reallocating) one element at a time.
        self.data.extend(values);
        let end = Id::from(self.data.len() as u32);
        Range { start, end }
    }
}

impl<T> Index<Id<T>> for Arena<T> {
    type Output = T;
    fn index(&self, id: Id<T>) -> &T {
        &self.data[id.raw as usize]
    }
}

impl<T> Index<Range<Id<T>>> for Arena<T> {
    type Output = [T];
    fn index(&self, id: Range<Id<T>>) -> &[T] {
        &self.data[(id.start.raw as usize)..(id.end.raw as usize)]
    }
}

270
vendor/pubgrub/src/internal/core.rs vendored Normal file
View file

@ -0,0 +1,270 @@
// SPDX-License-Identifier: MPL-2.0
//! Core model and functions
//! to write a functional PubGrub algorithm.
use std::collections::HashSet as Set;
use crate::error::PubGrubError;
use crate::internal::arena::Arena;
use crate::internal::incompatibility::{IncompId, Incompatibility, Relation};
use crate::internal::partial_solution::SatisfierSearch::{
DifferentDecisionLevels, SameDecisionLevels,
};
use crate::internal::partial_solution::{DecisionLevel, PartialSolution};
use crate::internal::small_vec::SmallVec;
use crate::package::Package;
use crate::report::DerivationTree;
use crate::solver::DependencyConstraints;
use crate::type_aliases::Map;
use crate::version::Version;
/// Current state of the PubGrub algorithm.
#[derive(Clone)]
pub struct State<P: Package, V: Version> {
    // The package the whole resolution is rooted at.
    root_package: P,
    // The version of the root package.
    root_version: V,

    // Ids of every incompatibility mentioning a given package, in insertion order.
    incompatibilities: Map<P, Vec<IncompId<P, V>>>,

    /// Store the ids of incompatibilities that are already contradicted
    /// and will stay that way until the next conflict and backtrack is operated.
    contradicted_incompatibilities: rustc_hash::FxHashSet<IncompId<P, V>>,

    /// Partial solution.
    /// TODO: remove pub.
    pub partial_solution: PartialSolution<P, V>,

    /// The store is the reference storage for all incompatibilities.
    pub incompatibility_store: Arena<Incompatibility<P, V>>,

    /// This is a stack of work to be done in `unit_propagation`.
    /// It can definitely be a local variable to that method, but
    /// this way we can reuse the same allocation for better performance.
    unit_propagation_buffer: SmallVec<P>,
}
impl<P: Package, V: Version> State<P, V> {
    /// Initialization of PubGrub state.
    pub fn init(root_package: P, root_version: V) -> Self {
        let mut incompatibility_store = Arena::new();
        // Seed the store with the "not root" incompatibility so that the very
        // first decision has to select the root package at the root version.
        let not_root_id = incompatibility_store.alloc(Incompatibility::not_root(
            root_package.clone(),
            root_version.clone(),
        ));
        let mut incompatibilities = Map::default();
        incompatibilities.insert(root_package.clone(), vec![not_root_id]);
        Self {
            root_package,
            root_version,
            incompatibilities,
            contradicted_incompatibilities: rustc_hash::FxHashSet::default(),
            partial_solution: PartialSolution::empty(),
            incompatibility_store,
            unit_propagation_buffer: SmallVec::Empty,
        }
    }

    /// Add an incompatibility to the state.
    pub fn add_incompatibility(&mut self, incompat: Incompatibility<P, V>) {
        let id = self.incompatibility_store.alloc(incompat);
        self.merge_incompatibility(id);
    }

    /// Add incompatibilities derived from the dependencies of the given
    /// package version. Returns the range of ids newly allocated in the store.
    pub fn add_incompatibility_from_dependencies(
        &mut self,
        package: P,
        version: V,
        deps: &DependencyConstraints<P, V>,
    ) -> std::ops::Range<IncompId<P, V>> {
        // Create incompatibilities and allocate them in the store.
        let new_incompats_id_range = self
            .incompatibility_store
            .alloc_iter(deps.iter().map(|dep| {
                Incompatibility::from_dependency(package.clone(), version.clone(), dep)
            }));
        // Merge the newly created incompatibilities with the older ones.
        for id in IncompId::range_to_iter(new_incompats_id_range.clone()) {
            self.merge_incompatibility(id);
        }
        new_incompats_id_range
    }

    /// Check if an incompatibility is terminal.
    pub fn is_terminal(&self, incompatibility: &Incompatibility<P, V>) -> bool {
        incompatibility.is_terminal(&self.root_package, &self.root_version)
    }

    /// Unit propagation is the core mechanism of the solving algorithm.
    /// CF <https://github.com/dart-lang/pub/blob/master/doc/solver.md#unit-propagation>
    pub fn unit_propagation(&mut self, package: P) -> Result<(), PubGrubError<P, V>> {
        self.unit_propagation_buffer.clear();
        self.unit_propagation_buffer.push(package);
        while let Some(current_package) = self.unit_propagation_buffer.pop() {
            // Iterate over incompatibilities in reverse order
            // to evaluate first the newest incompatibilities.
            let mut conflict_id = None;
            // We only care about incompatibilities if it contains the current package.
            for &incompat_id in self.incompatibilities[&current_package].iter().rev() {
                // Skip incompatibilities known to be contradicted since the last backtrack.
                if self.contradicted_incompatibilities.contains(&incompat_id) {
                    continue;
                }
                let current_incompat = &self.incompatibility_store[incompat_id];
                match self.partial_solution.relation(current_incompat) {
                    // If the partial solution satisfies the incompatibility
                    // we must perform conflict resolution.
                    Relation::Satisfied => {
                        conflict_id = Some(incompat_id);
                        break;
                    }
                    Relation::AlmostSatisfied(package_almost) => {
                        self.unit_propagation_buffer.push(package_almost.clone());
                        // Add (not term) to the partial solution with incompat as cause.
                        self.partial_solution.add_derivation(
                            package_almost,
                            incompat_id,
                            &self.incompatibility_store,
                        );
                        // With the partial solution updated, the incompatibility is now contradicted.
                        self.contradicted_incompatibilities.insert(incompat_id);
                    }
                    Relation::Contradicted(_) => {
                        self.contradicted_incompatibilities.insert(incompat_id);
                    }
                    // Relation::Inconclusive: nothing can be deduced yet.
                    _ => {}
                }
            }
            if let Some(incompat_id) = conflict_id {
                let (package_almost, root_cause) = self.conflict_resolution(incompat_id)?;
                self.unit_propagation_buffer.clear();
                self.unit_propagation_buffer.push(package_almost.clone());
                // Add to the partial solution with incompat as cause.
                self.partial_solution.add_derivation(
                    package_almost,
                    root_cause,
                    &self.incompatibility_store,
                );
                // After conflict resolution and the partial solution update,
                // the root cause incompatibility is now contradicted.
                self.contradicted_incompatibilities.insert(root_cause);
            }
        }
        // If there are no more changed packages, unit propagation is done.
        Ok(())
    }

    /// Return the root cause and the backtracked model.
    /// CF <https://github.com/dart-lang/pub/blob/master/doc/solver.md#unit-propagation>
    fn conflict_resolution(
        &mut self,
        incompatibility: IncompId<P, V>,
    ) -> Result<(P, IncompId<P, V>), PubGrubError<P, V>> {
        let mut current_incompat_id = incompatibility;
        let mut current_incompat_changed = false;
        loop {
            if self.incompatibility_store[current_incompat_id]
                .is_terminal(&self.root_package, &self.root_version)
            {
                // The conflict reaches the root itself: resolution has failed.
                return Err(PubGrubError::NoSolution(
                    self.build_derivation_tree(current_incompat_id),
                ));
            } else {
                let (package, satisfier_search_result) = self.partial_solution.satisfier_search(
                    &self.incompatibility_store[current_incompat_id],
                    &self.incompatibility_store,
                );
                match satisfier_search_result {
                    DifferentDecisionLevels {
                        previous_satisfier_level,
                    } => {
                        self.backtrack(
                            current_incompat_id,
                            current_incompat_changed,
                            previous_satisfier_level,
                        );
                        return Ok((package, current_incompat_id));
                    }
                    SameDecisionLevels { satisfier_cause } => {
                        // Derive a new incompatibility via the rule of resolution
                        // and continue looking for the root cause.
                        let prior_cause = Incompatibility::prior_cause(
                            current_incompat_id,
                            satisfier_cause,
                            &package,
                            &self.incompatibility_store,
                        );
                        current_incompat_id = self.incompatibility_store.alloc(prior_cause);
                        current_incompat_changed = true;
                    }
                }
            }
        }
    }

    /// Backtracking: rewind the partial solution to `decision_level` and
    /// re-merge the conflict incompatibility if it was rewritten on the way.
    fn backtrack(
        &mut self,
        incompat: IncompId<P, V>,
        incompat_changed: bool,
        decision_level: DecisionLevel,
    ) {
        self.partial_solution
            .backtrack(decision_level, &self.incompatibility_store);
        // Cached contradictions may no longer hold after backtracking.
        self.contradicted_incompatibilities.clear();
        if incompat_changed {
            self.merge_incompatibility(incompat);
        }
    }

    /// Add this incompatibility into the set of all incompatibilities.
    ///
    /// Pub collapses identical dependencies from adjacent package versions
    /// into individual incompatibilities.
    /// This substantially reduces the total number of incompatibilities
    /// and makes it much easier for Pub to reason about multiple versions of packages at once.
    ///
    /// For example, rather than representing
    /// foo 1.0.0 depends on bar ^1.0.0 and
    /// foo 1.1.0 depends on bar ^1.0.0
    /// as two separate incompatibilities,
    /// they are collapsed together into the single incompatibility {foo ^1.0.0, not bar ^1.0.0}
    /// (provided that no other version of foo exists between 1.0.0 and 2.0.0).
    /// We could collapse them into { foo (1.0.0 1.1.0), not bar ^1.0.0 }
    /// without having to check the existence of other versions though.
    ///
    /// Here we do the simple stupid thing of just growing the Vec.
    /// It may not be trivial since those incompatibilities
    /// may already have derived others.
    fn merge_incompatibility(&mut self, id: IncompId<P, V>) {
        // Index the incompatibility under every package it mentions.
        for (pkg, _term) in self.incompatibility_store[id].iter() {
            self.incompatibilities
                .entry(pkg.clone())
                .or_default()
                .push(id);
        }
    }

    // Error reporting #########################################################

    /// Build the derivation tree used to report a resolution failure.
    fn build_derivation_tree(&self, incompat: IncompId<P, V>) -> DerivationTree<P, V> {
        let shared_ids = self.find_shared_ids(incompat);
        Incompatibility::build_derivation_tree(incompat, &shared_ids, &self.incompatibility_store)
    }

    /// Collect incompatibilities reachable more than once in the causal graph,
    /// so the reporter can reference them instead of repeating them.
    fn find_shared_ids(&self, incompat: IncompId<P, V>) -> Set<IncompId<P, V>> {
        let mut all_ids = Set::new();
        let mut shared_ids = Set::new();
        let mut stack = vec![incompat];
        while let Some(i) = stack.pop() {
            if let Some((id1, id2)) = self.incompatibility_store[i].causes() {
                if all_ids.contains(&i) {
                    shared_ids.insert(i);
                } else {
                    all_ids.insert(i);
                    stack.push(id1);
                    stack.push(id2);
                }
            }
        }
        shared_ids
    }
}

View file

@ -0,0 +1,296 @@
// SPDX-License-Identifier: MPL-2.0
//! An incompatibility is a set of terms for different packages
//! that should never be satisfied all together.
use std::collections::HashSet as Set;
use std::fmt;
use crate::internal::arena::{Arena, Id};
use crate::internal::small_map::SmallMap;
use crate::package::Package;
use crate::range::Range;
use crate::report::{DefaultStringReporter, DerivationTree, Derived, External};
use crate::term::{self, Term};
use crate::version::Version;
/// An incompatibility is a set of terms for different packages
/// that should never be satisfied all together.
/// An incompatibility usually originates from a package dependency.
/// For example, if package A at version 1 depends on package B
/// at version 2, you can never have both terms `A = 1`
/// and `not B = 2` satisfied at the same time in a partial solution.
/// This would mean that we found a solution with package A at version 1
/// but not with package B at version 2.
/// Yet A at version 1 depends on B at version 2 so this is not possible.
/// Therefore, the set `{ A = 1, not B = 2 }` is an incompatibility,
/// defined from dependencies of A at version 1.
///
/// Incompatibilities can also be derived from two other incompatibilities
/// during conflict resolution. More about all this in
/// [PubGrub documentation](https://github.com/dart-lang/pub/blob/master/doc/solver.md#incompatibility).
#[derive(Debug, Clone)]
pub struct Incompatibility<P: Package, V: Version> {
    // One term per package involved in the incompatibility.
    package_terms: SmallMap<P, Term<V>>,
    // How this incompatibility came to be; used to build the derivation tree.
    kind: Kind<P, V>,
}

/// Type alias of unique identifiers for incompatibilities.
pub(crate) type IncompId<P, V> = Id<Incompatibility<P, V>>;

#[derive(Debug, Clone)]
enum Kind<P: Package, V: Version> {
    /// Initial incompatibility aiming at picking the root package for the first decision.
    NotRoot(P, V),
    /// There are no versions in the given range for this package.
    NoVersions(P, Range<V>),
    /// Dependencies of the package are unavailable for versions in that range.
    UnavailableDependencies(P, Range<V>),
    /// Incompatibility coming from the dependencies of a given package.
    FromDependencyOf(P, Range<V>, P, Range<V>),
    /// Derived from two causes. Stores cause ids.
    DerivedFrom(IncompId<P, V>, IncompId<P, V>),
}

/// A Relation describes how a set of terms can be compared to an incompatibility.
/// Typically, the set of terms comes from the partial solution.
#[derive(Eq, PartialEq, Debug)]
pub enum Relation<P: Package> {
    /// We say that a set of terms S satisfies an incompatibility I
    /// if S satisfies every term in I.
    Satisfied,
    /// We say that S contradicts I
    /// if S contradicts at least one term in I.
    Contradicted(P),
    /// If S satisfies all but one of I's terms and is inconclusive for the remaining term,
    /// we say S "almost satisfies" I and we call the remaining term the "unsatisfied term".
    AlmostSatisfied(P),
    /// Otherwise, we say that their relation is inconclusive.
    Inconclusive,
}
impl<P: Package, V: Version> Incompatibility<P, V> {
    /// Create the initial "not Root" incompatibility.
    pub fn not_root(package: P, version: V) -> Self {
        Self {
            package_terms: SmallMap::One([(
                package.clone(),
                Term::Negative(Range::exact(version.clone())),
            )]),
            kind: Kind::NotRoot(package, version),
        }
    }

    /// Create an incompatibility to remember
    /// that a given range does not contain any version.
    pub fn no_versions(package: P, term: Term<V>) -> Self {
        let range = match &term {
            Term::Positive(r) => r.clone(),
            // NOTE(review): this panic fires on a *negative* term; the message
            // reads backwards but is kept verbatim from upstream pubgrub.
            Term::Negative(_) => panic!("No version should have a positive term"),
        };
        Self {
            package_terms: SmallMap::One([(package.clone(), term)]),
            kind: Kind::NoVersions(package, range),
        }
    }

    /// Create an incompatibility to remember
    /// that a package version is not selectable
    /// because its list of dependencies is unavailable.
    pub fn unavailable_dependencies(package: P, version: V) -> Self {
        let range = Range::exact(version);
        Self {
            package_terms: SmallMap::One([(package.clone(), Term::Positive(range.clone()))]),
            kind: Kind::UnavailableDependencies(package, range),
        }
    }

    /// Build an incompatibility from a given dependency.
    pub fn from_dependency(package: P, version: V, dep: (&P, &Range<V>)) -> Self {
        let range1 = Range::exact(version);
        let (p2, range2) = dep;
        // { package = version, not (p2 in range2) }: selecting exactly this
        // version without satisfying its dependency is impossible.
        Self {
            package_terms: SmallMap::Two([
                (package.clone(), Term::Positive(range1.clone())),
                (p2.clone(), Term::Negative(range2.clone())),
            ]),
            kind: Kind::FromDependencyOf(package, range1, p2.clone(), range2.clone()),
        }
    }

    /// Prior cause of two incompatibilities using the rule of resolution.
    pub fn prior_cause(
        incompat: Id<Self>,
        satisfier_cause: Id<Self>,
        package: &P,
        incompatibility_store: &Arena<Self>,
    ) -> Self {
        let kind = Kind::DerivedFrom(incompat, satisfier_cause);
        let mut package_terms = incompatibility_store[incompat].package_terms.clone();
        let t1 = package_terms.remove(package).unwrap();
        let satisfier_cause_terms = &incompatibility_store[satisfier_cause].package_terms;
        // Intersect the terms of both incompatibilities for every package
        // other than the one being resolved on.
        package_terms.merge(
            satisfier_cause_terms.iter().filter(|(p, _)| p != &package),
            |t1, t2| Some(t1.intersection(t2)),
        );
        // The resolved package only stays in the result if the union of its
        // two terms still excludes something.
        let term = t1.union(satisfier_cause_terms.get(package).unwrap());
        if term != Term::any() {
            package_terms.insert(package.clone(), term);
        }
        Self {
            package_terms,
            kind,
        }
    }

    /// Check if an incompatibility should mark the end of the algorithm
    /// because it satisfies the root package.
    pub fn is_terminal(&self, root_package: &P, root_version: &V) -> bool {
        if self.package_terms.len() == 0 {
            true
        } else if self.package_terms.len() > 1 {
            false
        } else {
            let (package, term) = self.package_terms.iter().next().unwrap();
            (package == root_package) && term.contains(&root_version)
        }
    }

    /// Get the term related to a given package (if it exists).
    pub fn get(&self, package: &P) -> Option<&Term<V>> {
        self.package_terms.get(package)
    }

    /// Iterate over packages.
    pub fn iter(&self) -> impl Iterator<Item = (&P, &Term<V>)> {
        self.package_terms.iter()
    }

    // Reporting ###############################################################

    /// Retrieve parent causes if of type DerivedFrom.
    pub fn causes(&self) -> Option<(Id<Self>, Id<Self>)> {
        match self.kind {
            Kind::DerivedFrom(id1, id2) => Some((id1, id2)),
            _ => None,
        }
    }

    /// Build a derivation tree for error reporting.
    pub fn build_derivation_tree(
        self_id: Id<Self>,
        shared_ids: &Set<Id<Self>>,
        store: &Arena<Self>,
    ) -> DerivationTree<P, V> {
        match &store[self_id].kind {
            Kind::DerivedFrom(id1, id2) => {
                // Recurse into both causes to build the full explanation tree.
                let cause1 = Self::build_derivation_tree(*id1, shared_ids, store);
                let cause2 = Self::build_derivation_tree(*id2, shared_ids, store);
                let derived = Derived {
                    terms: store[self_id].package_terms.as_map(),
                    shared_id: shared_ids.get(&self_id).map(|id| id.into_raw()),
                    cause1: Box::new(cause1),
                    cause2: Box::new(cause2),
                };
                DerivationTree::Derived(derived)
            }
            Kind::NotRoot(package, version) => {
                DerivationTree::External(External::NotRoot(package.clone(), version.clone()))
            }
            Kind::NoVersions(package, range) => {
                DerivationTree::External(External::NoVersions(package.clone(), range.clone()))
            }
            Kind::UnavailableDependencies(package, range) => DerivationTree::External(
                External::UnavailableDependencies(package.clone(), range.clone()),
            ),
            Kind::FromDependencyOf(package, range, dep_package, dep_range) => {
                DerivationTree::External(External::FromDependencyOf(
                    package.clone(),
                    range.clone(),
                    dep_package.clone(),
                    dep_range.clone(),
                ))
            }
        }
    }
}
impl<'a, P: Package, V: Version + 'a> Incompatibility<P, V> {
    /// CF definition of Relation enum.
    ///
    /// `terms` is a lookup returning the current term for a package, if any
    /// (typically backed by the partial solution).
    pub fn relation(&self, terms: impl Fn(&P) -> Option<&'a Term<V>>) -> Relation<P> {
        let mut relation = Relation::Satisfied;
        for (package, incompat_term) in self.package_terms.iter() {
            match terms(package).map(|term| incompat_term.relation_with(&term)) {
                Some(term::Relation::Satisfied) => {}
                Some(term::Relation::Contradicted) => {
                    // One contradicted term contradicts the whole incompatibility.
                    return Relation::Contradicted(package.clone());
                }
                None | Some(term::Relation::Inconclusive) => {
                    // If a package is not present, the intersection is the same as [Term::any].
                    // According to the rules of satisfactions, the relation would be inconclusive.
                    // It could also be satisfied if the incompatibility term was also [Term::any],
                    // but we systematically remove those from incompatibilities
                    // so we're safe on that front.
                    if relation == Relation::Satisfied {
                        relation = Relation::AlmostSatisfied(package.clone());
                    } else {
                        relation = Relation::Inconclusive;
                    }
                }
            }
        }
        relation
    }
}
impl<P: Package, V: Version> fmt::Display for Incompatibility<P, V> {
    /// Render the incompatibility as its reporter-formatted set of terms.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let terms = self.package_terms.as_map();
        f.write_str(&DefaultStringReporter::string_terms(&terms))
    }
}
// TESTS #######################################################################
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::term::tests::strategy as term_strat;
    use crate::type_aliases::Map;
    use proptest::prelude::*;

    proptest! {
        /// For any three different packages p1, p2 and p3,
        /// for any three terms t1, t2 and t3,
        /// if we have the two following incompatibilities:
        ///    { p1: t1, p2: not t2 }
        ///    { p2: t2, p3: t3 }
        /// the rule of resolution says that we can deduce the following incompatibility:
        ///    { p1: t1, p3: t3 }
        #[test]
        fn rule_of_resolution(t1 in term_strat(), t2 in term_strat(), t3 in term_strat()) {
            let mut store = Arena::new();
            // The `kind` value is irrelevant here: `prior_cause` replaces it
            // with `Kind::DerivedFrom`, so any variant works as a placeholder.
            let i1 = store.alloc(Incompatibility {
                package_terms: SmallMap::Two([("p1", t1.clone()), ("p2", t2.negate())]),
                kind: Kind::UnavailableDependencies("0", Range::any())
            });

            let i2 = store.alloc(Incompatibility {
                package_terms: SmallMap::Two([("p2", t2), ("p3", t3.clone())]),
                kind: Kind::UnavailableDependencies("0", Range::any())
            });

            // Expected result of resolving i1 and i2 on "p2".
            let mut i3 = Map::default();
            i3.insert("p1", t1);
            i3.insert("p3", t3);

            let i_resolution = Incompatibility::prior_cause(i1, i2, &"p2", &store);
            assert_eq!(i_resolution.package_terms.as_map(), i3);
        }
    }
}

10
vendor/pubgrub/src/internal/mod.rs vendored Normal file
View file

@ -0,0 +1,10 @@
// SPDX-License-Identifier: MPL-2.0
//! Non exposed modules.
pub(crate) mod arena;
pub(crate) mod core;
pub(crate) mod incompatibility;
pub(crate) mod partial_solution;
pub(crate) mod small_map;
pub(crate) mod small_vec;

View file

@ -0,0 +1,444 @@
// SPDX-License-Identifier: MPL-2.0
//! A Memory acts like a structured partial solution
//! where terms are regrouped by package in a [Map](crate::type_aliases::Map).
use crate::internal::arena::Arena;
use crate::internal::incompatibility::{IncompId, Incompatibility, Relation};
use crate::internal::small_map::SmallMap;
use crate::package::Package;
use crate::range::Range;
use crate::term::Term;
use crate::type_aliases::{Map, SelectedDependencies};
use crate::version::Version;
use super::small_vec::SmallVec;
/// Counter of decisions taken so far; assignments are stamped with it so
/// conflict resolution knows how far to backtrack.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub struct DecisionLevel(pub u32);

impl DecisionLevel {
    /// Return the decision level directly above this one.
    pub fn increment(self) -> Self {
        let DecisionLevel(level) = self;
        Self(level + 1)
    }
}
/// The partial solution contains all package assignments,
/// organized by package and historically ordered.
#[derive(Clone, Debug)]
pub struct PartialSolution<P: Package, V: Version> {
    // Monotonically increasing counter stamped on every assignment,
    // giving a total historical order across packages.
    next_global_index: u32,
    // Number of decisions currently taken.
    current_decision_level: DecisionLevel,
    // All assignments, grouped by package.
    package_assignments: Map<P, PackageAssignments<P, V>>,
}

/// Package assignments contain the potential decision and derivations
/// that have already been made for a given package,
/// as well as the intersection of terms by all of these.
#[derive(Clone, Debug)]
struct PackageAssignments<P: Package, V: Version> {
    // Decision level of the earliest assignment for this package.
    smallest_decision_level: DecisionLevel,
    // Decision level of the most recent assignment for this package.
    highest_decision_level: DecisionLevel,
    // Historically ordered derivations made for this package.
    dated_derivations: SmallVec<DatedDerivation<P, V>>,
    // Intersection of all assignment terms (or the decision, once taken).
    assignments_intersection: AssignmentsIntersection<V>,
}

/// A derivation together with the moment it was made.
#[derive(Clone, Debug)]
pub(crate) struct DatedDerivation<P: Package, V: Version> {
    // Position in the global assignment history.
    global_index: u32,
    // Decision level at which this derivation was added.
    decision_level: DecisionLevel,
    // Incompatibility that caused this derivation.
    cause: IncompId<P, V>,
}

/// Either a taken decision, or the running intersection
/// of all derivation terms for a package.
#[derive(Clone, Debug)]
enum AssignmentsIntersection<V: Version> {
    // (global index, selected version, exact term for that version).
    Decision((u32, V, Term<V>)),
    // Intersection of the terms of all derivations so far.
    Derivations(Term<V>),
}

/// Outcome of the satisfier search, directing conflict resolution.
#[derive(Clone, Debug)]
pub enum SatisfierSearch<P: Package, V: Version> {
    /// The satisfier and previous satisfier are at different decision levels:
    /// backtrack to `previous_satisfier_level`.
    DifferentDecisionLevels {
        previous_satisfier_level: DecisionLevel,
    },
    /// Both are at the same decision level: derive a prior cause
    /// from `satisfier_cause` and keep resolving.
    SameDecisionLevels {
        satisfier_cause: IncompId<P, V>,
    },
}
impl<P: Package, V: Version> PartialSolution<P, V> {
/// Initialize an empty PartialSolution.
pub fn empty() -> Self {
    Self {
        next_global_index: 0,
        // Decision level 0 means no decision has been taken yet.
        current_decision_level: DecisionLevel(0),
        package_assignments: Map::default(),
    }
}
/// Add a decision.
pub fn add_decision(&mut self, package: P, version: V) {
    // Check that add_decision is never used in the wrong context.
    if cfg!(debug_assertions) {
        match self.package_assignments.get_mut(&package) {
            None => panic!("Derivations must already exist"),
            Some(pa) => match &pa.assignments_intersection {
                // Cannot be called when a decision has already been taken.
                AssignmentsIntersection::Decision(_) => panic!("Already existing decision"),
                // Cannot be called if the versions is not contained in the terms intersection.
                AssignmentsIntersection::Derivations(term) => {
                    debug_assert!(term.contains(&version))
                }
            },
        }
    }
    self.current_decision_level = self.current_decision_level.increment();
    let pa = self
        .package_assignments
        .get_mut(&package)
        .expect("Derivations must already exist");
    pa.highest_decision_level = self.current_decision_level;
    // Replace the derivations intersection with the decision itself.
    pa.assignments_intersection = AssignmentsIntersection::Decision((
        self.next_global_index,
        version.clone(),
        Term::exact(version),
    ));
    self.next_global_index += 1;
}
/// Add a derivation.
pub fn add_derivation(
    &mut self,
    package: P,
    cause: IncompId<P, V>,
    store: &Arena<Incompatibility<P, V>>,
) {
    use std::collections::hash_map::Entry;
    // The derived term is the negation of the cause's term for this package.
    let term = store[cause].get(&package).unwrap().negate();
    let dated_derivation = DatedDerivation {
        global_index: self.next_global_index,
        decision_level: self.current_decision_level,
        cause,
    };
    self.next_global_index += 1;
    match self.package_assignments.entry(package) {
        Entry::Occupied(mut occupied) => {
            let pa = occupied.get_mut();
            pa.highest_decision_level = self.current_decision_level;
            match &mut pa.assignments_intersection {
                // Check that add_derivation is never called in the wrong context.
                AssignmentsIntersection::Decision(_) => {
                    panic!("add_derivation should not be called after a decision")
                }
                // Fold the new term into the running intersection.
                AssignmentsIntersection::Derivations(t) => {
                    *t = t.intersection(&term);
                }
            }
            pa.dated_derivations.push(dated_derivation);
        }
        Entry::Vacant(v) => {
            // First assignment ever made for this package.
            v.insert(PackageAssignments {
                smallest_decision_level: self.current_decision_level,
                highest_decision_level: self.current_decision_level,
                dated_derivations: SmallVec::One([dated_derivation]),
                assignments_intersection: AssignmentsIntersection::Derivations(term),
            });
        }
    }
}
/// Extract potential packages for the next iteration of unit propagation.
/// Return `None` if there is no suitable package anymore, which stops the algorithm.
/// A package is a potential pick if there isn't an already
/// selected version (no "decision")
/// and if it contains at least one positive derivation term
/// in the partial solution.
pub fn potential_packages(&self) -> Option<impl Iterator<Item = (&P, &Range<V>)>> {
    let mut iter = self
        .package_assignments
        .iter()
        .filter_map(|(p, pa)| pa.assignments_intersection.potential_package_filter(p))
        .peekable();
    // Peek so that an empty iterator yields `None` rather than `Some(empty)`.
    if iter.peek().is_some() {
        Some(iter)
    } else {
        None
    }
}
/// If a partial solution has, for every positive derivation,
/// a corresponding decision that satisfies that assignment,
/// it's a total solution and version solving has succeeded.
pub fn extract_solution(&self) -> Option<SelectedDependencies<P, V>> {
    let mut solution = Map::default();
    for (p, pa) in &self.package_assignments {
        match &pa.assignments_intersection {
            AssignmentsIntersection::Decision((_, v, _)) => {
                solution.insert(p.clone(), v.clone());
            }
            AssignmentsIntersection::Derivations(term) => {
                if term.is_positive() {
                    // A positive term without a decision means the
                    // solution is not total yet.
                    return None;
                }
            }
        }
    }
    Some(solution)
}
/// Backtrack the partial solution to a given decision level.
pub fn backtrack(
    &mut self,
    decision_level: DecisionLevel,
    store: &Arena<Incompatibility<P, V>>,
) {
    // Note: next_global_index is deliberately left untouched; it only ever grows.
    self.current_decision_level = decision_level;
    self.package_assignments.retain(|p, pa| {
        if pa.smallest_decision_level > decision_level {
            // Remove all entries that have a smallest decision level higher than the backtrack target.
            false
        } else if pa.highest_decision_level <= decision_level {
            // Do not change entries older than the backtrack decision level target.
            true
        } else {
            // smallest_decision_level <= decision_level < highest_decision_level
            //
            // Since decision_level < highest_decision_level,
            // We can be certain that there will be no decision in this package assignments
            // after backtracking, because such decision would have been the last
            // assignment and it would have the "highest_decision_level".

            // Truncate the history.
            while pa.dated_derivations.last().map(|dd| dd.decision_level) > Some(decision_level)
            {
                pa.dated_derivations.pop();
            }
            debug_assert!(!pa.dated_derivations.is_empty());

            // Update highest_decision_level.
            pa.highest_decision_level = pa.dated_derivations.last().unwrap().decision_level;

            // Recompute the assignments intersection.
            pa.assignments_intersection = AssignmentsIntersection::Derivations(
                pa.dated_derivations
                    .iter()
                    .fold(Term::any(), |acc, dated_derivation| {
                        let term = store[dated_derivation.cause].get(&p).unwrap().negate();
                        acc.intersection(&term)
                    }),
            );
            true
        }
    });
}
/// We can add the version to the partial solution as a decision
/// if it doesn't produce any conflict with the new incompatibilities.
/// In practice I think it can only produce a conflict if one of the dependencies
/// (which are used to make the new incompatibilities)
/// is already in the partial solution with an incompatible version.
pub fn add_version(
    &mut self,
    package: P,
    version: V,
    new_incompatibilities: std::ops::Range<IncompId<P, V>>,
    store: &Arena<Incompatibility<P, V>>,
) {
    let exact = Term::exact(version.clone());
    // An incompatibility is harmless as long as it is not fully satisfied
    // once we pretend `package` is pinned to exactly `version`.
    let is_harmless = |incompat: &Incompatibility<P, V>| {
        let relation = incompat.relation(|p| {
            if p == &package {
                Some(&exact)
            } else {
                self.term_intersection_for_package(p)
            }
        });
        relation != Relation::Satisfied
    };
    // Only record the decision if none of the dependency incompatibilities
    // would become satisfied (i.e. conflict) by it.
    if store[new_incompatibilities].iter().all(is_harmless) {
        self.add_decision(package, version);
    }
}
/// Check if the terms in the partial solution satisfy the incompatibility.
pub fn relation(&self, incompat: &Incompatibility<P, V>) -> Relation<P> {
    incompat.relation(|p| self.term_intersection_for_package(p))
}
/// Retrieve intersection of terms related to package.
pub fn term_intersection_for_package(&self, package: &P) -> Option<&Term<V>> {
    let pa = self.package_assignments.get(package)?;
    Some(pa.assignments_intersection.term())
}
/// Figure out if the satisfier and previous satisfier are of different decision levels.
///
/// Returns the satisfier's package together with either the previous
/// satisfier's decision level (when the levels differ) or the satisfier's
/// cause (when they are the same), which drives conflict resolution.
pub fn satisfier_search(
    &self,
    incompat: &Incompatibility<P, V>,
    store: &Arena<Incompatibility<P, V>>,
) -> (P, SatisfierSearch<P, V>) {
    let satisfied_map = Self::find_satisfier(incompat, &self.package_assignments, store);
    // The satisfier is the assignment with the highest global index.
    let (satisfier_package, &(satisfier_index, _, satisfier_decision_level)) = satisfied_map
        .iter()
        .max_by_key(|(_p, (_, global_index, _))| global_index)
        .unwrap();
    let satisfier_package = satisfier_package.clone();
    let previous_satisfier_level = Self::find_previous_satisfier(
        incompat,
        &satisfier_package,
        satisfied_map,
        &self.package_assignments,
        store,
    );
    if previous_satisfier_level < satisfier_decision_level {
        let search_result = SatisfierSearch::DifferentDecisionLevels {
            previous_satisfier_level,
        };
        (satisfier_package, search_result)
    } else {
        // Same decision level: report the derivation that caused satisfaction.
        let satisfier_pa = self.package_assignments.get(&satisfier_package).unwrap();
        let dd = &satisfier_pa.dated_derivations[satisfier_index];
        let search_result = SatisfierSearch::SameDecisionLevels {
            satisfier_cause: dd.cause,
        };
        (satisfier_package, search_result)
    }
}
/// A satisfier is the earliest assignment in partial solution such that the incompatibility
/// is satisfied by the partial solution up to and including that assignment.
///
/// Returns a map indicating for each package term, when that was first satisfied in history.
/// If we effectively found a satisfier, the returned map must be the same size that incompat.
///
/// Question: This is possible since we added a "global_index" to every dated_derivation.
/// It would be nice if we could get rid of it, but I don't know if then it will be possible
/// to return a coherent previous_satisfier_level.
fn find_satisfier(
    incompat: &Incompatibility<P, V>,
    package_assignments: &Map<P, PackageAssignments<P, V>>,
    store: &Arena<Incompatibility<P, V>>,
) -> SmallMap<P, (usize, u32, DecisionLevel)> {
    let mut satisfied_map = SmallMap::Empty;
    for (package, incompat_term) in incompat.iter() {
        let assignments = package_assignments.get(package).expect("Must exist");
        // Record the first point in this package's history where the
        // incompatibility term became satisfied.
        let satisfier = assignments.satisfier(package, incompat_term, Term::any(), store);
        satisfied_map.insert(package.clone(), satisfier);
    }
    satisfied_map
}
/// Earliest assignment in the partial solution before satisfier
/// such that incompatibility is satisfied by the partial solution up to
/// and including that assignment plus satisfier.
fn find_previous_satisfier(
    incompat: &Incompatibility<P, V>,
    satisfier_package: &P,
    mut satisfied_map: SmallMap<P, (usize, u32, DecisionLevel)>,
    package_assignments: &Map<P, PackageAssignments<P, V>>,
    store: &Arena<Incompatibility<P, V>>,
) -> DecisionLevel {
    // First, let's retrieve the previous derivations and the initial accum_term.
    let satisfier_pa = package_assignments.get(satisfier_package).unwrap();
    let (satisfier_index, _gidx, _dl) = satisfied_map.get_mut(satisfier_package).unwrap();
    // An index equal to the derivation count means the satisfier was the
    // decision itself (see PackageAssignments::satisfier).
    let accum_term = if *satisfier_index == satisfier_pa.dated_derivations.len() {
        match &satisfier_pa.assignments_intersection {
            AssignmentsIntersection::Derivations(_) => panic!("must be a decision"),
            AssignmentsIntersection::Decision((_, _, term)) => term.clone(),
        }
    } else {
        let dd = &satisfier_pa.dated_derivations[*satisfier_index];
        store[dd.cause].get(satisfier_package).unwrap().negate()
    };
    let incompat_term = incompat
        .get(satisfier_package)
        .expect("satisfier package not in incompat");
    // Re-run the satisfier search for this package, seeded with the
    // satisfier's own term, to find the assignment just before it.
    satisfied_map.insert(
        satisfier_package.clone(),
        satisfier_pa.satisfier(satisfier_package, incompat_term, accum_term, store),
    );
    // Finally, let's identify the decision level of that previous satisfier.
    let (_, &(_, _, decision_level)) = satisfied_map
        .iter()
        .max_by_key(|(_p, (_, global_index, _))| global_index)
        .unwrap();
    // Never report below level 1 (presumably the root decision level — see solver).
    decision_level.max(DecisionLevel(1))
}
}
impl<P: Package, V: Version> PackageAssignments<P, V> {
    /// Walk this package's assignment history (derivations, then the decision)
    /// and return the index, global index, and decision level of the first
    /// assignment at which `incompat_term` becomes satisfied, starting the
    /// accumulation from `start_term`.
    ///
    /// An index equal to `dated_derivations.len()` means the decision itself
    /// was the satisfier.
    fn satisfier(
        &self,
        package: &P,
        incompat_term: &Term<V>,
        start_term: Term<V>,
        store: &Arena<Incompatibility<P, V>>,
    ) -> (usize, u32, DecisionLevel) {
        // Term where we accumulate intersections until incompat_term is satisfied.
        let mut accum_term = start_term;
        // Indicate if we found a satisfier in the list of derivations, otherwise it will be the decision.
        for (idx, dated_derivation) in self.dated_derivations.iter().enumerate() {
            // Each cause stores (negated) the term it derived for this package.
            let this_term = store[dated_derivation.cause].get(package).unwrap().negate();
            accum_term = accum_term.intersection(&this_term);
            if accum_term.subset_of(incompat_term) {
                // We found the derivation causing satisfaction.
                return (
                    idx,
                    dated_derivation.global_index,
                    dated_derivation.decision_level,
                );
            }
        }
        // If it wasn't found in the derivations,
        // it must be the decision which is last (if called in the right context).
        match self.assignments_intersection {
            AssignmentsIntersection::Decision((global_index, _, _)) => (
                self.dated_derivations.len(),
                global_index,
                self.highest_decision_level,
            ),
            AssignmentsIntersection::Derivations(_) => {
                panic!("This must be a decision")
            }
        }
    }
}
impl<V: Version> AssignmentsIntersection<V> {
    /// Returns the term intersection of all assignments (decision included).
    fn term(&self) -> &Term<V> {
        // Both variants carry the cached term in the same position.
        match self {
            Self::Decision((_, _, term)) | Self::Derivations(term) => term,
        }
    }
    /// A package is a potential pick if there isn't an already
    /// selected version (no "decision")
    /// and if it contains at least one positive derivation term
    /// in the partial solution.
    fn potential_package_filter<'a, P: Package>(
        &'a self,
        package: &'a P,
    ) -> Option<(&'a P, &'a Range<V>)> {
        if let Self::Derivations(term_intersection) = self {
            if term_intersection.is_positive() {
                return Some((package, term_intersection.unwrap_positive()));
            }
        }
        // Decided packages, and negative-only derivations, are not candidates.
        None
    }
}

195
vendor/pubgrub/src/internal/small_map.rs vendored Normal file
View file

@ -0,0 +1,195 @@
use crate::type_aliases::Map;
use std::hash::Hash;
/// A map optimized for holding zero, one, or two entries inline,
/// spilling to a regular hash map beyond that.
#[derive(Debug, Clone)]
pub(crate) enum SmallMap<K, V> {
    // No entries.
    Empty,
    // Exactly one key-value pair, stored inline.
    One([(K, V); 1]),
    // Exactly two key-value pairs, stored inline.
    Two([(K, V); 2]),
    // Three or more entries, stored in a hash map.
    Flexible(Map<K, V>),
}
impl<K: PartialEq + Eq + Hash, V> SmallMap<K, V> {
    /// Look up the value stored for `key`, if any.
    pub(crate) fn get(&self, key: &K) -> Option<&V> {
        match self {
            Self::Empty => None,
            Self::One(entries) => entries.iter().find(|(k, _)| k == key).map(|(_, v)| v),
            Self::Two(entries) => entries.iter().find(|(k, _)| k == key).map(|(_, v)| v),
            Self::Flexible(map) => map.get(key),
        }
    }
    /// Look up a mutable reference to the value stored for `key`, if any.
    pub(crate) fn get_mut(&mut self, key: &K) -> Option<&mut V> {
        match self {
            Self::Empty => None,
            Self::One(entries) => entries.iter_mut().find(|(k, _)| k == key).map(|(_, v)| v),
            Self::Two(entries) => entries.iter_mut().find(|(k, _)| k == key).map(|(_, v)| v),
            Self::Flexible(map) => map.get_mut(key),
        }
    }
    /// Remove and return the value stored for `key`, shrinking the
    /// representation when an inline slot is freed.
    pub(crate) fn remove(&mut self, key: &K) -> Option<V> {
        // Move out of `self` so inline entries can be restructured by value.
        let (next, removed) = match std::mem::take(self) {
            Self::Empty => (Self::Empty, None),
            Self::One([(k, v)]) => {
                if key == &k {
                    (Self::Empty, Some(v))
                } else {
                    (Self::One([(k, v)]), None)
                }
            }
            Self::Two([(k1, v1), (k2, v2)]) => {
                if key == &k1 {
                    (Self::One([(k2, v2)]), Some(v1))
                } else if key == &k2 {
                    (Self::One([(k1, v1)]), Some(v2))
                } else {
                    (Self::Two([(k1, v1), (k2, v2)]), None)
                }
            }
            Self::Flexible(mut map) => {
                let removed = map.remove(key);
                (Self::Flexible(map), removed)
            }
        };
        *self = next;
        removed
    }
    /// Insert `value` under `key`, replacing any existing value and
    /// spilling to a hash map once a third distinct key arrives.
    pub(crate) fn insert(&mut self, key: K, value: V) {
        *self = match std::mem::take(self) {
            Self::Empty => Self::One([(key, value)]),
            Self::One([(k, _)]) if key == k => Self::One([(k, value)]),
            Self::One([(k, v)]) => Self::Two([(k, v), (key, value)]),
            Self::Two([(k1, _), (k2, v2)]) if key == k1 => Self::Two([(k1, value), (k2, v2)]),
            Self::Two([(k1, v1), (k2, _)]) if key == k2 => Self::Two([(k1, v1), (k2, value)]),
            Self::Two([(k1, v1), (k2, v2)]) => {
                // Growing past two entries: spill into a real hash map.
                let mut map: Map<K, V> = Map::with_capacity_and_hasher(3, Default::default());
                map.insert(key, value);
                map.insert(k1, v1);
                map.insert(k2, v2);
                Self::Flexible(map)
            }
            Self::Flexible(mut map) => {
                map.insert(key, value);
                Self::Flexible(map)
            }
        };
    }
}
impl<K: Clone + PartialEq + Eq + Hash, V: Clone> SmallMap<K, V> {
    /// Merge two hash maps.
    ///
    /// When a key is common to both,
    /// apply the provided function to both values.
    /// If the result is None, remove that key from the merged map,
    /// otherwise add the content of the Some(_).
    pub(crate) fn merge<'a>(
        &'a mut self,
        map_2: impl Iterator<Item = (&'a K, &'a V)>,
        f: impl Fn(&V, &V) -> Option<V>,
    ) {
        for (key, incoming) in map_2 {
            match self.get_mut(key) {
                // Key present on both sides: combine the two values.
                Some(existing) => match f(existing, incoming) {
                    Some(merged) => *existing = merged,
                    None => {
                        self.remove(key);
                    }
                },
                // Key only present on the right-hand side: copy it over.
                None => self.insert(key.clone(), incoming.clone()),
            }
        }
    }
}
impl<K, V> Default for SmallMap<K, V> {
fn default() -> Self {
Self::Empty
}
}
impl<K, V> SmallMap<K, V> {
    /// Number of entries currently stored.
    pub(crate) fn len(&self) -> usize {
        match self {
            Self::Empty => 0,
            Self::One(_) => 1,
            Self::Two(_) => 2,
            Self::Flexible(data) => data.len(),
        }
    }
}
impl<K: Eq + Hash + Clone, V: Clone> SmallMap<K, V> {
pub(crate) fn as_map(&self) -> Map<K, V> {
match self {
Self::Empty => Map::default(),
Self::One([(k, v)]) => {
let mut map = Map::with_capacity_and_hasher(1, Default::default());
map.insert(k.clone(), v.clone());
map
}
Self::Two(data) => {
let mut map = Map::with_capacity_and_hasher(2, Default::default());
for (k, v) in data {
map.insert(k.clone(), v.clone());
}
map
}
Self::Flexible(data) => data.clone(),
}
}
}
/// Iterator over the entries of a [SmallMap], drawing either from the
/// inline array storage or from the spilled hash map.
enum IterSmallMap<'a, K, V> {
    Inline(std::slice::Iter<'a, (K, V)>),
    Map(std::collections::hash_map::Iter<'a, K, V>),
}
impl<'a, K: 'a, V: 'a> Iterator for IterSmallMap<'a, K, V> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        match self {
            // Slice iteration yields `&(K, V)`; split it into a pair of refs.
            IterSmallMap::Inline(it) => it.next().map(|entry| (&entry.0, &entry.1)),
            IterSmallMap::Map(it) => it.next(),
        }
    }
}
impl<K, V> SmallMap<K, V> {
    /// Iterate over all `(key, value)` pairs, in storage order.
    pub(crate) fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
        match self {
            Self::Empty => IterSmallMap::Inline([].iter()),
            Self::One(data) => IterSmallMap::Inline(data.iter()),
            Self::Two(data) => IterSmallMap::Inline(data.iter()),
            Self::Flexible(data) => IterSmallMap::Map(data.iter()),
        }
    }
}

157
vendor/pubgrub/src/internal/small_vec.rs vendored Normal file
View file

@ -0,0 +1,157 @@
use std::fmt;
use std::ops::Deref;
/// A vector storing up to two elements inline, spilling to a heap-backed
/// `Vec` beyond that.
#[derive(Clone)]
pub enum SmallVec<T> {
    // No elements.
    Empty,
    // One element, stored inline.
    One([T; 1]),
    // Two elements, stored inline.
    Two([T; 2]),
    // Three or more elements, stored on the heap.
    Flexible(Vec<T>),
}
impl<T> SmallVec<T> {
    /// An empty vector.
    pub fn empty() -> Self {
        Self::Empty
    }
    /// A vector holding a single element.
    pub fn one(t: T) -> Self {
        Self::One([t])
    }
    /// View the elements as a contiguous slice.
    pub fn as_slice(&self) -> &[T] {
        match self {
            Self::Empty => &[],
            Self::One(inline) => inline,
            Self::Two(inline) => inline,
            Self::Flexible(vec) => vec,
        }
    }
    /// Append an element, spilling to a heap `Vec` past two elements.
    pub fn push(&mut self, new: T) {
        // Move out of `self` so inline arrays can be rebuilt by value.
        let next = match std::mem::take(self) {
            Self::Empty => Self::One([new]),
            Self::One([a]) => Self::Two([a, new]),
            Self::Two([a, b]) => Self::Flexible(vec![a, b, new]),
            Self::Flexible(mut vec) => {
                vec.push(new);
                Self::Flexible(vec)
            }
        };
        *self = next;
    }
    /// Remove and return the last element, if any.
    pub fn pop(&mut self) -> Option<T> {
        let (next, popped) = match std::mem::take(self) {
            Self::Empty => (Self::Empty, None),
            Self::One([a]) => (Self::Empty, Some(a)),
            Self::Two([a, b]) => (Self::One([a]), Some(b)),
            Self::Flexible(mut vec) => {
                let popped = vec.pop();
                (Self::Flexible(vec), popped)
            }
        };
        *self = next;
        popped
    }
    /// Drop all elements, keeping any existing heap allocation for reuse.
    pub fn clear(&mut self) {
        if let Self::Flexible(mut vec) = std::mem::take(self) {
            vec.clear();
            *self = Self::Flexible(vec);
        } // otherwise `take` already left `self` as Empty
    }
    /// Iterate over the elements by reference.
    pub fn iter(&self) -> std::slice::Iter<'_, T> {
        self.as_slice().iter()
    }
}
impl<T> Default for SmallVec<T> {
fn default() -> Self {
Self::Empty
}
}
impl<T> Deref for SmallVec<T> {
    type Target = [T];
    /// Dereference to the slice view, so all slice methods are available.
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<'a, T> IntoIterator for &'a SmallVec<T> {
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;
    /// Allow `for x in &small_vec` loops via the slice iterator.
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}
// Equality and formatting delegate to the slice view, so the inline and
// heap-backed representations compare and print identically.
impl<T: Eq> Eq for SmallVec<T> {}
impl<T: PartialEq> PartialEq for SmallVec<T> {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T: fmt::Debug> fmt::Debug for SmallVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_slice().fmt(f)
    }
}
// Serde support: serialize as a plain sequence; deserialize by pushing
// items one at a time so small inputs stay in the inline representation.
#[cfg(feature = "serde")]
impl<T: serde::Serialize> serde::Serialize for SmallVec<T> {
    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        serde::Serialize::serialize(self.as_slice(), s)
    }
}
#[cfg(feature = "serde")]
impl<'de, T: serde::Deserialize<'de>> serde::Deserialize<'de> for SmallVec<T> {
    fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        let items: Vec<T> = serde::Deserialize::deserialize(d)?;
        let mut v = Self::empty();
        for item in items {
            v.push(item);
        }
        Ok(v)
    }
}
// TESTS #######################################################################
#[cfg(test)]
pub mod tests {
    use super::*;
    use proptest::prelude::*;
    proptest! {
        // Applying the same push/pop commands to a `Vec` and a `SmallVec`
        // must always leave them with identical contents.
        #[test]
        fn push_and_pop(commands: Vec<Option<u8>>) {
            let mut expected = vec![];
            let mut sv = SmallVec::Empty;
            for command in commands {
                match command {
                    Some(i) => {
                        expected.push(i);
                        sv.push(i);
                    }
                    None => {
                        assert_eq!(expected.pop(), sv.pop());
                    }
                }
                assert_eq!(expected.as_slice(), sv.as_slice());
            }
        }
    }
}

221
vendor/pubgrub/src/lib.rs vendored Normal file
View file

@ -0,0 +1,221 @@
// SPDX-License-Identifier: MPL-2.0
//! PubGrub version solving algorithm.
//!
//! Version solving consists in efficiently finding a set of packages and versions
//! that satisfy all the constraints of a given project dependencies.
//! In addition, when that is not possible,
//! we should try to provide a very human-readable and clear
//! explanation as to why that failed.
//!
//! # Package and Version traits
//!
//! All the code in this crate is manipulating packages and versions, and for this to work
//! we defined a [Package](package::Package) and [Version](version::Version) traits
//! that are used as bounds on most of the exposed types and functions.
//!
//! Package identifiers need to implement our [Package](package::Package) trait,
//! which is automatic if the type already implements
//! [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
//! So things like [String] will work out of the box.
//!
//! Our [Version](version::Version) trait requires
//! [Clone] + [Ord] + [Debug] + [Display](std::fmt::Display)
//! and also the definition of two methods,
//! [lowest() -> Self](version::Version::lowest) which returns the lowest version existing,
//! and [bump(&self) -> Self](version::Version::bump) which returns the next smallest version
//! strictly higher than the current one.
//! For convenience, this library already provides
//! two implementations of [Version](version::Version).
//! The first one is [NumberVersion](version::NumberVersion), basically a newtype for [u32].
//! The second one is [SemanticVersion](version::SemanticVersion)
//! that implements semantic versioning rules.
//!
//! # Basic example
//!
//! Let's imagine that we are building a user interface
//! with a menu containing dropdowns with some icons,
//! icons that we are also directly using in other parts of the interface.
//! For this scenario our direct dependencies are `menu` and `icons`,
//! but the complete set of dependencies looks like follows:
//!
//! - `root` depends on `menu` and `icons`
//! - `menu` depends on `dropdown`
//! - `dropdown` depends on `icons`
//! - `icons` has no dependency
//!
//! We can model that scenario with this library as follows
//! ```
//! # use pubgrub::solver::{OfflineDependencyProvider, resolve};
//! # use pubgrub::version::NumberVersion;
//! # use pubgrub::range::Range;
//! #
//! let mut dependency_provider = OfflineDependencyProvider::<&str, NumberVersion>::new();
//!
//! dependency_provider.add_dependencies(
//! "root", 1, vec![("menu", Range::any()), ("icons", Range::any())],
//! );
//! dependency_provider.add_dependencies("menu", 1, vec![("dropdown", Range::any())]);
//! dependency_provider.add_dependencies("dropdown", 1, vec![("icons", Range::any())]);
//! dependency_provider.add_dependencies("icons", 1, vec![]);
//!
//! // Run the algorithm.
//! let solution = resolve(&dependency_provider, "root", 1).unwrap();
//! ```
//!
//! # DependencyProvider trait
//!
//! In our previous example we used the
//! [OfflineDependencyProvider](solver::OfflineDependencyProvider),
//! which is a basic implementation of the [DependencyProvider](solver::DependencyProvider) trait.
//!
//! But we might want to implement the [DependencyProvider](solver::DependencyProvider)
//! trait for our own type.
//! Let's say that we will use [String] for packages,
//! and [SemanticVersion](version::SemanticVersion) for versions.
//! This may be done quite easily by implementing the two following functions.
//! ```
//! # use pubgrub::solver::{DependencyProvider, Dependencies};
//! # use pubgrub::version::SemanticVersion;
//! # use pubgrub::range::Range;
//! # use pubgrub::type_aliases::Map;
//! # use std::error::Error;
//! # use std::borrow::Borrow;
//! #
//! # struct MyDependencyProvider;
//! #
//! impl DependencyProvider<String, SemanticVersion> for MyDependencyProvider {
//! fn choose_package_version<T: Borrow<String>, U: Borrow<Range<SemanticVersion>>>(&self,packages: impl Iterator<Item=(T, U)>) -> Result<(T, Option<SemanticVersion>), Box<dyn Error>> {
//! unimplemented!()
//! }
//!
//! fn get_dependencies(
//! &self,
//! package: &String,
//! version: &SemanticVersion,
//! ) -> Result<Dependencies<String, SemanticVersion>, Box<dyn Error>> {
//! unimplemented!()
//! }
//! }
//! ```
//!
//! The first method
//! [choose_package_version](crate::solver::DependencyProvider::choose_package_version)
//! chooses a package and available version compatible with the provided options.
//! A helper function
//! [choose_package_with_fewest_versions](crate::solver::choose_package_with_fewest_versions)
//! is provided for convenience
//! in cases when lists of available versions for packages are easily obtained.
//! The strategy of that helper function consists in choosing the package
//! with the fewest number of compatible versions to speed up resolution.
//! But in general you are free to employ whatever strategy suits you best
//! to pick a package and a version.
//!
//! The second method [get_dependencies](crate::solver::DependencyProvider::get_dependencies)
//! aims at retrieving the dependencies of a given package at a given version.
//! Returns [None] if dependencies are unknown.
//!
//! In a real scenario, these two methods may involve reading the file system
//! or doing network request, so you may want to hold a cache in your
//! [DependencyProvider](solver::DependencyProvider) implementation.
//! How exactly this could be achieved is shown in `CachingDependencyProvider`
//! (see `examples/caching_dependency_provider.rs`).
//! You could also use the [OfflineDependencyProvider](solver::OfflineDependencyProvider)
//! type defined by the crate as guidance,
//! but you are free to use whatever approach makes sense in your situation.
//!
//! # Solution and error reporting
//!
//! When everything goes well, the algorithm finds and returns the complete
//! set of direct and indirect dependencies satisfying all the constraints.
//! The packages and versions selected are returned as
//! [SelectedDependencies<P, V>](type_aliases::SelectedDependencies).
//! But sometimes there is no solution because dependencies are incompatible.
//! In such cases, [resolve(...)](solver::resolve) returns a
//! [PubGrubError::NoSolution(derivation_tree)](error::PubGrubError::NoSolution),
//! where the provided derivation tree is a custom binary tree
//! containing the full chain of reasons why there is no solution.
//!
//! All the items in the tree are called incompatibilities
//! and may be of two types, either "external" or "derived".
//! Leaves of the tree are external incompatibilities,
//! and nodes are derived.
//! External incompatibilities have reasons that are independent
//! of the way this algorithm is implemented such as
//! - dependencies: "package_a" at version 1 depends on "package_b" at version 4
//! - missing dependencies: dependencies of "package_a" are unknown
//! - absence of version: there is no version of "package_a" in the range [3.1.0 4.0.0[
//!
//! Derived incompatibilities are obtained during the algorithm execution by deduction,
//! such as if "a" depends on "b" and "b" depends on "c", "a" depends on "c".
//!
//! This crate defines a [Reporter](crate::report::Reporter) trait, with an associated
//! [Output](crate::report::Reporter::Output) type and a single method.
//! ```
//! # use pubgrub::package::Package;
//! # use pubgrub::version::Version;
//! # use pubgrub::report::DerivationTree;
//! #
//! pub trait Reporter<P: Package, V: Version> {
//! type Output;
//!
//! fn report(derivation_tree: &DerivationTree<P, V>) -> Self::Output;
//! }
//! ```
//! Implementing a [Reporter](crate::report::Reporter) may involve a lot of heuristics
//! to make the output human-readable and natural.
//! For convenience, we provide a default implementation
//! [DefaultStringReporter](crate::report::DefaultStringReporter)
//! that outputs the report as a [String].
//! You may use it as follows:
//! ```
//! # use pubgrub::solver::{resolve, OfflineDependencyProvider};
//! # use pubgrub::report::{DefaultStringReporter, Reporter};
//! # use pubgrub::error::PubGrubError;
//! # use pubgrub::version::NumberVersion;
//! #
//! # let dependency_provider = OfflineDependencyProvider::<&str, NumberVersion>::new();
//! # let root_package = "root";
//! # let root_version = 1;
//! #
//! match resolve(&dependency_provider, root_package, root_version) {
//! Ok(solution) => println!("{:?}", solution),
//! Err(PubGrubError::NoSolution(mut derivation_tree)) => {
//! derivation_tree.collapse_no_versions();
//! eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
//! }
//! Err(err) => panic!("{:?}", err),
//! };
//! ```
//! Notice that we also used
//! [collapse_no_versions()](crate::report::DerivationTree::collapse_no_versions) above.
//! This method simplifies the derivation tree to get rid of the
//! [NoVersions](crate::report::External::NoVersions)
//! external incompatibilities in the derivation tree.
//! So instead of seeing things like this in the report:
//! ```txt
//! Because there is no version of foo in 1.0.1 <= v < 2.0.0
//! and foo 1.0.0 depends on bar 2.0.0 <= v < 3.0.0,
//! foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
//! ```
//! you may have directly:
//! ```txt
//! foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
//! ```
//! Beware though that if you are using some kind of offline mode
//! with a cache, you may want to know that some versions
//! do not exist in your cache.
#![allow(clippy::rc_buffer)]
#![warn(missing_docs)]
pub mod error;
pub mod package;
pub mod range;
pub mod report;
pub mod solver;
pub mod term;
pub mod type_aliases;
pub mod version;
mod internal;

17
vendor/pubgrub/src/package.rs vendored Normal file
View file

@ -0,0 +1,17 @@
// SPDX-License-Identifier: MPL-2.0
//! Trait for identifying packages.
//! Automatically implemented for types implementing
//! [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
use std::fmt::{Debug, Display};
use std::hash::Hash;
/// Trait for identifying packages.
/// Automatically implemented for types already implementing
/// [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
pub trait Package: Clone + Eq + Hash + Debug + Display {}
/// Automatically implement the Package trait for any type
/// that already implements [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
// Note: the blanket impl means downstream code never implements Package manually.
impl<T: Clone + Eq + Hash + Debug + Display> Package for T {}

409
vendor/pubgrub/src/range.rs vendored Normal file
View file

@ -0,0 +1,409 @@
// SPDX-License-Identifier: MPL-2.0
//! Ranges are constraints defining sets of versions.
//!
//! Concretely, those constraints correspond to any set of versions
//! representable as the concatenation, union, and complement
//! of the ranges building blocks.
//!
//! Those building blocks are:
//! - [none()](Range::none): the empty set
//! - [any()](Range::any): the set of all possible versions
//! - [exact(v)](Range::exact): the set containing only the version v
//! - [higher_than(v)](Range::higher_than): the set defined by `v <= versions`
//! - [strictly_lower_than(v)](Range::strictly_lower_than): the set defined by `versions < v`
//! - [between(v1, v2)](Range::between): the set defined by `v1 <= versions < v2`
use std::cmp::Ordering;
use std::fmt;
use crate::internal::small_vec::SmallVec;
use crate::version::Version;
/// A Range is a set of versions.
///
/// Internally stored as a sequence of `Interval` segments; the set-operation
/// code below relies on segments being ordered and disjoint, which the
/// building-block constructors maintain.
#[derive(Debug, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct Range<V: Version> {
    segments: SmallVec<Interval<V>>,
}
/// Half-open interval `[v1, v2[`; `None` as the upper bound means +∞.
type Interval<V> = (V, Option<V>);
// Range building blocks.
impl<V: Version> Range<V> {
    /// Empty set of versions.
    pub fn none() -> Self {
        Self {
            segments: SmallVec::empty(),
        }
    }
    /// Set of all possible versions.
    pub fn any() -> Self {
        Self::higher_than(V::lowest())
    }
    /// Set containing exactly one version.
    pub fn exact(v: impl Into<V>) -> Self {
        let version = v.into();
        // The half-open interval [version, version.bump()[ holds only `version`.
        let upper = version.bump();
        Self {
            segments: SmallVec::one((version, Some(upper))),
        }
    }
    /// Set of all versions higher or equal to some version.
    pub fn higher_than(v: impl Into<V>) -> Self {
        Self {
            segments: SmallVec::one((v.into(), None)),
        }
    }
    /// Set of all versions strictly lower than some version.
    pub fn strictly_lower_than(v: impl Into<V>) -> Self {
        let bound = v.into();
        if bound == V::lowest() {
            // Nothing exists strictly below the lowest version.
            Self::none()
        } else {
            Self {
                segments: SmallVec::one((V::lowest(), Some(bound))),
            }
        }
    }
    /// Set of all versions comprised between two given versions.
    /// The lower bound is included and the higher bound excluded.
    /// `v1 <= v < v2`.
    pub fn between(v1: impl Into<V>, v2: impl Into<V>) -> Self {
        let low = v1.into();
        let high = v2.into();
        if low >= high {
            // An empty or inverted interval denotes the empty set.
            Self::none()
        } else {
            Self {
                segments: SmallVec::one((low, Some(high))),
            }
        }
    }
}
// Set operations.
impl<V: Version> Range<V> {
    // Negate ##################################################################
    /// Compute the complement set of versions.
    pub fn negate(&self) -> Self {
        match self.segments.first() {
            None => Self::any(), // Complement of ∅ is *
            // First high bound is +∞
            Some((v, None)) => {
                // Complement of * is ∅
                if v == &V::lowest() {
                    Self::none()
                // Complement of "v <= _" is "_ < v"
                } else {
                    Self::strictly_lower_than(v.clone())
                }
            }
            // First high bound is not +∞
            Some((v1, Some(v2))) => {
                if v1 == &V::lowest() {
                    Self::negate_segments(v2.clone(), &self.segments[1..])
                } else {
                    Self::negate_segments(V::lowest(), &self.segments)
                }
            }
        }
    }
    /// Helper function performing the negation of intervals in segments.
    /// For example:
    /// [ (v1, None) ] => [ (start, Some(v1)) ]
    /// [ (v1, Some(v2)) ] => [ (start, Some(v1)), (v2, None) ]
    fn negate_segments(start: V, segments: &[Interval<V>]) -> Range<V> {
        let mut complement_segments = SmallVec::empty();
        let mut start = Some(start);
        for (v1, maybe_v2) in segments {
            // start.unwrap() is fine because `segments` is not exposed,
            // and our usage guarantees that only the last segment may contain a None.
            complement_segments.push((start.unwrap(), Some(v1.to_owned())));
            start = maybe_v2.to_owned();
        }
        if let Some(last) = start {
            complement_segments.push((last, None));
        }
        Self {
            segments: complement_segments,
        }
    }
    // Union and intersection ##################################################
    /// Compute the union of two sets of versions.
    // Implemented via De Morgan: A ∪ B = ¬(¬A ∩ ¬B).
    pub fn union(&self, other: &Self) -> Self {
        self.negate().intersection(&other.negate()).negate()
    }
    /// Compute the intersection of two sets of versions.
    // Two-pointer merge over the ordered segment lists of both ranges.
    pub fn intersection(&self, other: &Self) -> Self {
        let mut segments = SmallVec::empty();
        let mut left_iter = self.segments.iter();
        let mut right_iter = other.segments.iter();
        let mut left = left_iter.next();
        let mut right = right_iter.next();
        loop {
            match (left, right) {
                // Both left and right still contain a finite interval:
                (Some((l1, Some(l2))), Some((r1, Some(r2)))) => {
                    if l2 <= r1 {
                        // Intervals are disjoint, progress on the left.
                        left = left_iter.next();
                    } else if r2 <= l1 {
                        // Intervals are disjoint, progress on the right.
                        right = right_iter.next();
                    } else {
                        // Intervals are not disjoint.
                        let start = l1.max(r1).to_owned();
                        if l2 < r2 {
                            segments.push((start, Some(l2.to_owned())));
                            left = left_iter.next();
                        } else {
                            segments.push((start, Some(r2.to_owned())));
                            right = right_iter.next();
                        }
                    }
                }
                // Right contains an infinite interval:
                // everything on the left at or above r1 survives.
                (Some((l1, Some(l2))), Some((r1, None))) => match l2.cmp(r1) {
                    Ordering::Less => {
                        left = left_iter.next();
                    }
                    Ordering::Equal => {
                        for l in left_iter.cloned() {
                            segments.push(l)
                        }
                        break;
                    }
                    Ordering::Greater => {
                        let start = l1.max(r1).to_owned();
                        segments.push((start, Some(l2.to_owned())));
                        for l in left_iter.cloned() {
                            segments.push(l)
                        }
                        break;
                    }
                },
                // Left contains an infinite interval:
                // everything on the right at or above l1 survives.
                (Some((l1, None)), Some((r1, Some(r2)))) => match r2.cmp(l1) {
                    Ordering::Less => {
                        right = right_iter.next();
                    }
                    Ordering::Equal => {
                        for r in right_iter.cloned() {
                            segments.push(r)
                        }
                        break;
                    }
                    Ordering::Greater => {
                        let start = l1.max(r1).to_owned();
                        segments.push((start, Some(r2.to_owned())));
                        for r in right_iter.cloned() {
                            segments.push(r)
                        }
                        break;
                    }
                },
                // Both sides contain an infinite interval:
                (Some((l1, None)), Some((r1, None))) => {
                    let start = l1.max(r1).to_owned();
                    segments.push((start, None));
                    break;
                }
                // Left or right has ended.
                _ => {
                    break;
                }
            }
        }
        Self { segments }
    }
}
// Other useful functions.
impl<V: Version> Range<V> {
    /// Check if a range contains a given version.
    pub fn contains(&self, version: &V) -> bool {
        for (start, maybe_end) in &self.segments {
            match maybe_end {
                // Unbounded segment: membership only depends on the start.
                None => return start <= version,
                Some(end) => {
                    if version < start {
                        // Segments are ordered, so no later one can match either.
                        return false;
                    }
                    if version < end {
                        return true;
                    }
                }
            }
        }
        false
    }
    /// Return the lowest version in the range (if there is one).
    pub fn lowest_version(&self) -> Option<V> {
        let (start, _) = self.segments.first()?;
        Some(start.clone())
    }
}
// REPORT ######################################################################
impl<V: Version> fmt::Display for Range<V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.segments.as_slice() {
            // The empty range displays as nothing.
            [] => write!(f, ""),
            // A single unbounded segment.
            [(start, None)] => {
                if start == &V::lowest() {
                    // "any version at all" also displays as nothing.
                    write!(f, "")
                } else {
                    write!(f, "{} <= v", start)
                }
            }
            // A single bounded segment. Check the exact-version case first,
            // so an exact lowest version prints bare rather than as "v < …".
            [(start, Some(end))] => {
                if end == &start.bump() {
                    write!(f, "{}", start)
                } else if start == &V::lowest() {
                    write!(f, "v < {}", end)
                } else {
                    write!(f, "{} <= v < {}", start, end)
                }
            }
            // Several intervals: render each one and separate with a space.
            intervals => {
                let rendered: Vec<_> = intervals.iter().map(interval_to_string).collect();
                write!(f, "{}", rendered.join(" "))
            }
        }
    }
}
/// Render one interval in half-open mathematical notation `[ start, end [`,
/// using `∞` when the interval has no upper bound.
fn interval_to_string<V: Version>((start, maybe_end): &Interval<V>) -> String {
    if let Some(end) = maybe_end {
        format!("[ {}, {} [", start, end)
    } else {
        format!("[ {}, ∞ [", start)
    }
}
// TESTS #######################################################################
#[cfg(test)]
pub mod tests {
    use proptest::prelude::*;
    use crate::version::NumberVersion;
    use super::*;
    // Proptest strategy producing arbitrary well-formed ranges: bounds are
    // sorted and deduplicated, then paired into half-open segments; an odd
    // leftover bound becomes a final unbounded segment.
    pub fn strategy() -> impl Strategy<Value = Range<NumberVersion>> {
        prop::collection::vec(any::<u32>(), 0..10).prop_map(|mut vec| {
            vec.sort_unstable();
            vec.dedup();
            let mut pair_iter = vec.chunks_exact(2);
            let mut segments = SmallVec::empty();
            while let Some([v1, v2]) = pair_iter.next() {
                segments.push((NumberVersion(*v1), Some(NumberVersion(*v2))));
            }
            if let [v] = pair_iter.remainder() {
                segments.push((NumberVersion(*v), None));
            }
            Range { segments }
        })
    }
    // Proptest strategy producing arbitrary single versions.
    fn version_strat() -> impl Strategy<Value = NumberVersion> {
        any::<u32>().prop_map(NumberVersion)
    }
    // Property tests for the set algebra of Range: negation, intersection,
    // union, and membership must obey the usual boolean-algebra laws.
    proptest! {
        // Testing negate ----------------------------------
        #[test]
        fn negate_is_different(range in strategy()) {
            assert_ne!(range.negate(), range);
        }
        #[test]
        fn double_negate_is_identity(range in strategy()) {
            assert_eq!(range.negate().negate(), range);
        }
        #[test]
        fn negate_contains_opposite(range in strategy(), version in version_strat()) {
            assert_ne!(range.contains(&version), range.negate().contains(&version));
        }
        // Testing intersection ----------------------------
        #[test]
        fn intersection_is_symmetric(r1 in strategy(), r2 in strategy()) {
            assert_eq!(r1.intersection(&r2), r2.intersection(&r1));
        }
        #[test]
        fn intersection_with_any_is_identity(range in strategy()) {
            assert_eq!(Range::any().intersection(&range), range);
        }
        #[test]
        fn intersection_with_none_is_none(range in strategy()) {
            assert_eq!(Range::none().intersection(&range), Range::none());
        }
        #[test]
        fn intersection_is_idempotent(r1 in strategy(), r2 in strategy()) {
            assert_eq!(r1.intersection(&r2).intersection(&r2), r1.intersection(&r2));
        }
        #[test]
        fn intersection_is_associative(r1 in strategy(), r2 in strategy(), r3 in strategy()) {
            assert_eq!(r1.intersection(&r2).intersection(&r3), r1.intersection(&r2.intersection(&r3)));
        }
        // NOTE(review): "intesection" below is a typo kept to match upstream
        // vendored test names.
        #[test]
        fn intesection_of_complements_is_none(range in strategy()) {
            assert_eq!(range.negate().intersection(&range), Range::none());
        }
        #[test]
        fn intesection_contains_both(r1 in strategy(), r2 in strategy(), version in version_strat()) {
            assert_eq!(r1.intersection(&r2).contains(&version), r1.contains(&version) && r2.contains(&version));
        }
        // Testing union -----------------------------------
        #[test]
        fn union_of_complements_is_any(range in strategy()) {
            assert_eq!(range.negate().union(&range), Range::any());
        }
        #[test]
        fn union_contains_either(r1 in strategy(), r2 in strategy(), version in version_strat()) {
            assert_eq!(r1.union(&r2).contains(&version), r1.contains(&version) || r2.contains(&version));
        }
        // Testing contains --------------------------------
        #[test]
        fn always_contains_exact(version in version_strat()) {
            assert!(Range::exact(version).contains(&version));
        }
        // Same property as negate_contains_opposite above, kept for coverage
        // of the contains section.
        #[test]
        fn contains_negation(range in strategy(), version in version_strat()) {
            assert_ne!(range.contains(&version), range.negate().contains(&version));
        }
        #[test]
        fn contains_intersection(range in strategy(), version in version_strat()) {
            assert_eq!(range.contains(&version), range.intersection(&Range::exact(version)) != Range::none());
        }
    }
}

485
vendor/pubgrub/src/report.rs vendored Normal file
View file

@ -0,0 +1,485 @@
// SPDX-License-Identifier: MPL-2.0
//! Build a report as clear as possible as to why
//! dependency solving failed.
use std::fmt;
use std::ops::{Deref, DerefMut};
use crate::package::Package;
use crate::range::Range;
use crate::term::Term;
use crate::type_aliases::Map;
use crate::version::Version;
/// Reporter trait.
///
/// Implementors turn a [DerivationTree] into some user-facing output
/// (see [DefaultStringReporter] for a plain-text implementation).
pub trait Reporter<P: Package, V: Version> {
    /// Output type of the report.
    type Output;
    /// Generate a report from the derivation tree
    /// describing the resolution failure.
    fn report(derivation_tree: &DerivationTree<P, V>) -> Self::Output;
}
/// Derivation tree resulting in the impossibility
/// to solve the dependencies of our root package.
///
/// This is a binary tree: leaves are [External] incompatibilities and
/// inner nodes are [Derived] combinations of two causes.
#[derive(Debug, Clone)]
pub enum DerivationTree<P: Package, V: Version> {
    /// External incompatibility.
    External(External<P, V>),
    /// Incompatibility derived from two others.
    Derived(Derived<P, V>),
}
/// Incompatibilities that are not derived from others,
/// they have their own reason.
#[derive(Debug, Clone)]
pub enum External<P: Package, V: Version> {
    /// Initial incompatibility aiming at picking the root package for the first decision.
    NotRoot(P, V),
    /// There are no versions in the given range for this package.
    NoVersions(P, Range<V>),
    /// Dependencies of the package are unavailable for versions in that range.
    UnavailableDependencies(P, Range<V>),
    /// Incompatibility coming from the dependencies of a given package.
    /// Fields read as: (package, package range, dependency, dependency range).
    FromDependencyOf(P, Range<V>, P, Range<V>),
}
/// Incompatibility derived from two others.
#[derive(Debug, Clone)]
pub struct Derived<P: Package, V: Version> {
    /// Terms of the incompatibility.
    pub terms: Map<P, Term<V>>,
    /// Indicate if that incompatibility is present multiple times
    /// in the derivation tree.
    /// If that is the case, it has a unique id, provided in that option.
    /// Then, we may want to only explain it once,
    /// and refer to the explanation for the other times.
    pub shared_id: Option<usize>,
    /// First cause.
    /// Boxed because the tree is recursive.
    pub cause1: Box<DerivationTree<P, V>>,
    /// Second cause.
    /// Boxed because the tree is recursive.
    pub cause2: Box<DerivationTree<P, V>>,
}
impl<P: Package, V: Version> DerivationTree<P, V> {
    /// Merge the [NoVersions](External::NoVersions) external incompatibilities
    /// with the other one they are matched with
    /// in a derived incompatibility.
    /// This cleans up quite nicely the generated report.
    /// You might want to do this if you know that the
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// was not run in some kind of offline mode that may not
    /// have access to all versions existing.
    pub fn collapse_no_versions(&mut self) {
        match self {
            // Leaves have nothing to collapse.
            DerivationTree::External(_) => {}
            DerivationTree::Derived(derived) => {
                match (derived.cause1.deref_mut(), derived.cause2.deref_mut()) {
                    // One cause is a NoVersions: collapse the sibling cause
                    // recursively, then try to absorb the NoVersions into it,
                    // replacing this whole node with the merged result. If no
                    // merge rule applies, keep the node as-is.
                    (DerivationTree::External(External::NoVersions(p, r)), ref mut cause2) => {
                        cause2.collapse_no_versions();
                        *self = cause2
                            .clone()
                            .merge_no_versions(p.to_owned(), r.to_owned())
                            .unwrap_or_else(|| self.to_owned());
                    }
                    // Symmetric case: the NoVersions is the second cause.
                    (ref mut cause1, DerivationTree::External(External::NoVersions(p, r))) => {
                        cause1.collapse_no_versions();
                        *self = cause1
                            .clone()
                            .merge_no_versions(p.to_owned(), r.to_owned())
                            .unwrap_or_else(|| self.to_owned());
                    }
                    // Neither cause is a NoVersions: just recurse into both.
                    _ => {
                        derived.cause1.collapse_no_versions();
                        derived.cause2.collapse_no_versions();
                    }
                }
            }
        }
    }
    /// Absorb a `NoVersions(package, range)` into this node by widening the
    /// matching range, returning `None` when no merge rule applies.
    fn merge_no_versions(self, package: P, range: Range<V>) -> Option<Self> {
        match self {
            // TODO: take care of the Derived case.
            // Once done, we can remove the Option.
            DerivationTree::Derived(_) => Some(self),
            DerivationTree::External(External::NotRoot(_, _)) => {
                panic!("How did we end up with a NoVersions merged with a NotRoot?")
            }
            // Two NoVersions for the same package: widen into their union.
            DerivationTree::External(External::NoVersions(_, r)) => Some(DerivationTree::External(
                External::NoVersions(package, range.union(&r)),
            )),
            DerivationTree::External(External::UnavailableDependencies(_, r)) => {
                Some(DerivationTree::External(External::UnavailableDependencies(
                    package,
                    range.union(&r),
                )))
            }
            // Widen whichever side of the dependency edge names `package`.
            DerivationTree::External(External::FromDependencyOf(p1, r1, p2, r2)) => {
                if p1 == package {
                    Some(DerivationTree::External(External::FromDependencyOf(
                        p1,
                        r1.union(&range),
                        p2,
                        r2,
                    )))
                } else {
                    Some(DerivationTree::External(External::FromDependencyOf(
                        p1,
                        r1,
                        p2,
                        r2.union(&range),
                    )))
                }
            }
        }
    }
}
impl<P: Package, V: Version> fmt::Display for External<P, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `Range::any()` constrains nothing, so the arms below elide it from
        // the rendered sentence.
        let any = Range::any();
        match self {
            Self::NotRoot(package, version) => {
                write!(f, "we are solving dependencies of {} {}", package, version)
            }
            Self::NoVersions(package, range) if range == &any => {
                write!(f, "there is no available version for {}", package)
            }
            Self::NoVersions(package, range) => {
                write!(f, "there is no version of {} in {}", package, range)
            }
            Self::UnavailableDependencies(package, range) if range == &any => {
                write!(f, "dependencies of {} are unavailable", package)
            }
            Self::UnavailableDependencies(package, range) => {
                write!(
                    f,
                    "dependencies of {} at version {} are unavailable",
                    package, range
                )
            }
            Self::FromDependencyOf(p, range_p, dep, range_dep) => {
                // Pick the sentence shape from which of the two ranges are trivial.
                match (range_p == &any, range_dep == &any) {
                    (true, true) => write!(f, "{} depends on {}", p, dep),
                    (true, false) => write!(f, "{} depends on {} {}", p, dep, range_dep),
                    (false, true) => write!(f, "{} {} depends on {}", p, range_p, dep),
                    (false, false) => {
                        write!(f, "{} {} depends on {} {}", p, range_p, dep, range_dep)
                    }
                }
            }
        }
    }
}
/// Default reporter able to generate an explanation as a [String].
///
/// The report is built line by line; shared sub-derivations are explained
/// once and then referred to by a numeric line reference.
pub struct DefaultStringReporter {
    /// Number of explanations already with a line reference.
    ref_count: usize,
    /// Shared nodes that have already been marked with a line reference.
    /// The incompatibility ids are the keys, and the line references are the values.
    shared_with_ref: Map<usize, usize>,
    /// Accumulated lines of the report already generated.
    lines: Vec<String>,
}
impl DefaultStringReporter {
    /// Initialize the reporter.
    fn new() -> Self {
        Self {
            ref_count: 0,
            shared_with_ref: Map::default(),
            lines: Vec::new(),
        }
    }
    /// Explain a derived incompatibility, then — if it is a shared node that
    /// has no line reference yet — attach one so later occurrences can refer
    /// back to it instead of re-explaining.
    fn build_recursive<P: Package, V: Version>(&mut self, derived: &Derived<P, V>) {
        self.build_recursive_helper(derived);
        if let Some(id) = derived.shared_id {
            // Idiom: `contains_key` instead of comparing `get(..)` to `None`.
            if !self.shared_with_ref.contains_key(&id) {
                self.add_line_ref();
                self.shared_with_ref.insert(id, self.ref_count);
            }
        }
    }
    /// Core recursion: explain both causes of `current`,
    /// then conclude with the current incompatibility.
    fn build_recursive_helper<P: Package, V: Version>(&mut self, current: &Derived<P, V>) {
        match (current.cause1.deref(), current.cause2.deref()) {
            (DerivationTree::External(external1), DerivationTree::External(external2)) => {
                // Simplest case, we just combine two external incompatibilities.
                self.lines.push(Self::explain_both_external(
                    external1,
                    external2,
                    &current.terms,
                ));
            }
            (DerivationTree::Derived(derived), DerivationTree::External(external)) => {
                // One cause is derived, so we explain this first
                // then we add the one-line external part
                // and finally conclude with the current incompatibility.
                self.report_one_each(derived, external, &current.terms);
            }
            (DerivationTree::External(external), DerivationTree::Derived(derived)) => {
                self.report_one_each(derived, external, &current.terms);
            }
            (DerivationTree::Derived(derived1), DerivationTree::Derived(derived2)) => {
                // This is the most complex case since both causes are also derived.
                match (
                    self.line_ref_of(derived1.shared_id),
                    self.line_ref_of(derived2.shared_id),
                ) {
                    // If both causes already have been referenced (shared_id),
                    // the explanation simply uses those references.
                    (Some(ref1), Some(ref2)) => self.lines.push(Self::explain_both_ref(
                        ref1,
                        derived1,
                        ref2,
                        derived2,
                        &current.terms,
                    )),
                    // Otherwise, if one only has a line number reference,
                    // we recursively call the one without reference and then
                    // add the one with reference to conclude.
                    (Some(ref1), None) => {
                        self.build_recursive(derived2);
                        self.lines
                            .push(Self::and_explain_ref(ref1, derived1, &current.terms));
                    }
                    (None, Some(ref2)) => {
                        self.build_recursive(derived1);
                        self.lines
                            .push(Self::and_explain_ref(ref2, derived2, &current.terms));
                    }
                    // Finally, if no line reference exists yet,
                    // we call recursively the first one and then,
                    // - if this was a shared node, it will get a line ref
                    //   and we can simply recall this with the current node.
                    // - otherwise, we add a line reference to it,
                    //   recursively call on the second node,
                    //   and finally conclude.
                    (None, None) => {
                        self.build_recursive(derived1);
                        // Idiom: `.is_some()` instead of `!= None`.
                        if derived1.shared_id.is_some() {
                            self.lines.push("".into());
                            self.build_recursive(current);
                        } else {
                            self.add_line_ref();
                            let ref1 = self.ref_count;
                            self.lines.push("".into());
                            self.build_recursive(derived2);
                            self.lines
                                .push(Self::and_explain_ref(ref1, derived1, &current.terms));
                        }
                    }
                }
            }
        }
    }
    /// Report a derived and an external incompatibility.
    ///
    /// The result will depend on the fact that the derived incompatibility
    /// has already been explained or not.
    fn report_one_each<P: Package, V: Version>(
        &mut self,
        derived: &Derived<P, V>,
        external: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) {
        match self.line_ref_of(derived.shared_id) {
            Some(ref_id) => self.lines.push(Self::explain_ref_and_external(
                ref_id,
                derived,
                external,
                current_terms,
            )),
            None => self.report_recurse_one_each(derived, external, current_terms),
        }
    }
    /// Report one derived (without a line ref yet) and one external.
    fn report_recurse_one_each<P: Package, V: Version>(
        &mut self,
        derived: &Derived<P, V>,
        external: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) {
        match (derived.cause1.deref(), derived.cause2.deref()) {
            // If the derived cause has itself one external prior cause,
            // we can chain the external explanations.
            (DerivationTree::Derived(prior_derived), DerivationTree::External(prior_external)) => {
                self.build_recursive(prior_derived);
                self.lines.push(Self::and_explain_prior_and_external(
                    prior_external,
                    external,
                    current_terms,
                ));
            }
            // Symmetric case: external prior cause first.
            (DerivationTree::External(prior_external), DerivationTree::Derived(prior_derived)) => {
                self.build_recursive(prior_derived);
                self.lines.push(Self::and_explain_prior_and_external(
                    prior_external,
                    external,
                    current_terms,
                ));
            }
            _ => {
                self.build_recursive(derived);
                self.lines
                    .push(Self::and_explain_external(external, current_terms));
            }
        }
    }
    // String explanations #####################################################
    /// Simplest case, we just combine two external incompatibilities.
    fn explain_both_external<P: Package, V: Version>(
        external1: &External<P, V>,
        external2: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} and {}, {}.",
            external1,
            external2,
            Self::string_terms(current_terms)
        )
    }
    /// Both causes have already been explained so we use their refs.
    fn explain_both_ref<P: Package, V: Version>(
        ref_id1: usize,
        derived1: &Derived<P, V>,
        ref_id2: usize,
        derived2: &Derived<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} ({}) and {} ({}), {}.",
            Self::string_terms(&derived1.terms),
            ref_id1,
            Self::string_terms(&derived2.terms),
            ref_id2,
            Self::string_terms(current_terms)
        )
    }
    /// One cause is derived (already explained so one-line),
    /// the other is a one-line external cause,
    /// and finally we conclude with the current incompatibility.
    fn explain_ref_and_external<P: Package, V: Version>(
        ref_id: usize,
        derived: &Derived<P, V>,
        external: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} ({}) and {}, {}.",
            Self::string_terms(&derived.terms),
            ref_id,
            external,
            Self::string_terms(current_terms)
        )
    }
    /// Add an external cause to the chain of explanations.
    fn and_explain_external<P: Package, V: Version>(
        external: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        format!(
            "And because {}, {}.",
            external,
            Self::string_terms(current_terms)
        )
    }
    /// Add an already explained incompat to the chain of explanations.
    fn and_explain_ref<P: Package, V: Version>(
        ref_id: usize,
        derived: &Derived<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        format!(
            "And because {} ({}), {}.",
            Self::string_terms(&derived.terms),
            ref_id,
            Self::string_terms(current_terms)
        )
    }
    /// Chain a prior external cause and an external cause into one explanation.
    fn and_explain_prior_and_external<P: Package, V: Version>(
        prior_external: &External<P, V>,
        external: &External<P, V>,
        current_terms: &Map<P, Term<V>>,
    ) -> String {
        format!(
            "And because {} and {}, {}.",
            prior_external,
            external,
            Self::string_terms(current_terms)
        )
    }
    /// Try to print terms of an incompatibility in a human-readable way.
    pub fn string_terms<P: Package, V: Version>(terms: &Map<P, Term<V>>) -> String {
        let terms_vec: Vec<_> = terms.iter().collect();
        match terms_vec.as_slice() {
            [] => "version solving failed".into(),
            // TODO: special case when that unique package is root.
            [(package, Term::Positive(range))] => format!("{} {} is forbidden", package, range),
            [(package, Term::Negative(range))] => format!("{} {} is mandatory", package, range),
            // A positive and a negative term read as a dependency edge.
            [(p1, Term::Positive(r1)), (p2, Term::Negative(r2))] => {
                External::FromDependencyOf(p1, r1.clone(), p2, r2.clone()).to_string()
            }
            [(p1, Term::Negative(r1)), (p2, Term::Positive(r2))] => {
                External::FromDependencyOf(p2, r2.clone(), p1, r1.clone()).to_string()
            }
            slice => {
                let str_terms: Vec<_> = slice.iter().map(|(p, t)| format!("{} {}", p, t)).collect();
                str_terms.join(", ") + " are incompatible"
            }
        }
    }
    // Helper functions ########################################################
    /// Attach the next line reference number to the most recent report line.
    fn add_line_ref(&mut self) {
        let new_count = self.ref_count + 1;
        self.ref_count = new_count;
        if let Some(line) = self.lines.last_mut() {
            *line = format!("{} ({})", line, new_count);
        }
    }
    /// Look up the line reference of a shared incompatibility, if any.
    fn line_ref_of(&self, shared_id: Option<usize>) -> Option<usize> {
        // Idiom: `copied` (not `cloned`) since the values are plain `usize`.
        shared_id.and_then(|id| self.shared_with_ref.get(&id).copied())
    }
}
impl<P: Package, V: Version> Reporter<P, V> for DefaultStringReporter {
    type Output = String;
    fn report(derivation_tree: &DerivationTree<P, V>) -> Self::Output {
        match derivation_tree {
            // A derived root: walk the tree, accumulating one line per step.
            DerivationTree::Derived(derived) => {
                let mut reporter = Self::new();
                reporter.build_recursive(derived);
                reporter.lines.join("\n")
            }
            // A lone external incompatibility explains itself.
            DerivationTree::External(external) => external.to_string(),
        }
    }
}

394
vendor/pubgrub/src/solver.rs vendored Normal file
View file

@ -0,0 +1,394 @@
// SPDX-License-Identifier: MPL-2.0
//! PubGrub version solving algorithm.
//!
//! It consists in efficiently finding a set of packages and versions
//! that satisfy all the constraints of a given project dependencies.
//! In addition, when that is not possible,
//! PubGrub tries to provide a very human-readable and clear
//! explanation as to why that failed.
//! Below is an example of explanation present in
//! the introductory blog post about PubGrub
//!
//! ```txt
//! Because dropdown >=2.0.0 depends on icons >=2.0.0 and
//! root depends on icons <2.0.0, dropdown >=2.0.0 is forbidden.
//!
//! And because menu >=1.1.0 depends on dropdown >=2.0.0,
//! menu >=1.1.0 is forbidden.
//!
//! And because menu <1.1.0 depends on dropdown >=1.0.0 <2.0.0
//! which depends on intl <4.0.0, every version of menu
//! requires intl <4.0.0.
//!
//! So, because root depends on both menu >=1.0.0 and intl >=5.0.0,
//! version solving failed.
//! ```
//!
//! The algorithm is generic and works for any type of dependency system
//! as long as packages (P) and versions (V) implement
//! the [Package](crate::package::Package) and [Version](crate::version::Version) traits.
//! [Package](crate::package::Package) is strictly equivalent and automatically generated
//! for any type that implement [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
//! [Version](crate::version::Version) simply states that versions are ordered,
//! that there should be
//! a minimal [lowest](crate::version::Version::lowest) version (like 0.0.0 in semantic versions),
//! and that for any version, it is possible to compute
//! what the next version closest to this one is ([bump](crate::version::Version::bump)).
//! For semantic versions, [bump](crate::version::Version::bump) corresponds to
//! an increment of the patch number.
//!
//! ## API
//!
//! ```
//! # use pubgrub::solver::{resolve, OfflineDependencyProvider};
//! # use pubgrub::version::NumberVersion;
//! # use pubgrub::error::PubGrubError;
//! #
//! # fn try_main() -> Result<(), PubGrubError<&'static str, NumberVersion>> {
//! # let dependency_provider = OfflineDependencyProvider::<&str, NumberVersion>::new();
//! # let package = "root";
//! # let version = 1;
//! let solution = resolve(&dependency_provider, package, version)?;
//! # Ok(())
//! # }
//! # fn main() {
//! # assert!(matches!(try_main(), Err(PubGrubError::NoSolution(_))));
//! # }
//! ```
//!
//! Where `dependency_provider` supplies the list of available packages and versions,
//! as well as the dependencies of every available package
//! by implementing the [DependencyProvider] trait.
//! The call to [resolve] for a given package at a given version
//! will compute the set of packages and versions needed
//! to satisfy the dependencies of that package and version pair.
//! If there is no solution, the reason will be provided as clear as possible.
use std::borrow::Borrow;
use std::collections::{BTreeMap, BTreeSet as Set};
use std::error::Error;
use crate::error::PubGrubError;
pub use crate::internal::core::State;
pub use crate::internal::incompatibility::Incompatibility;
use crate::package::Package;
use crate::range::Range;
use crate::type_aliases::{Map, SelectedDependencies};
use crate::version::Version;
/// Main function of the library.
/// Finds a set of packages satisfying dependency bounds for a given package + version pair.
pub fn resolve<P: Package, V: Version>(
    dependency_provider: &impl DependencyProvider<P, V>,
    package: P,
    version: impl Into<V>,
) -> Result<SelectedDependencies<P, V>, PubGrubError<P, V>> {
    let mut state = State::init(package.clone(), version.into());
    // Tracks which (package, version) pairs already had their dependency
    // incompatibilities added to the solver state, so the same version is not
    // re-fetched or re-added when it is decided again after backtracking.
    let mut added_dependencies: Map<P, Set<V>> = Map::default();
    let mut next = package;
    loop {
        // Give the provider a chance to abort (timeout, user cancel, ...).
        dependency_provider
            .should_cancel()
            .map_err(|err| PubGrubError::ErrorInShouldCancel(err))?;
        // Propagate the consequences of the last decision / new incompatibilities.
        state.unit_propagation(next)?;
        let potential_packages = state.partial_solution.potential_packages();
        if potential_packages.is_none() {
            drop(potential_packages);
            // The borrow checker did not like using a match on potential_packages.
            // This `if ... is_none ... drop` is a workaround.
            // I believe this is a case where Polonius could help, when and if it lands in rustc.
            return state.partial_solution.extract_solution().ok_or_else(|| {
                PubGrubError::Failure(
                    "How did we end up with no package to choose but no solution?".into(),
                )
            });
        }
        // Ask the provider which package to decide next and which version to try.
        let decision = dependency_provider
            .choose_package_version(potential_packages.unwrap())
            .map_err(PubGrubError::ErrorChoosingPackageVersion)?;
        next = decision.0.clone();
        // Pick the next compatible version.
        let term_intersection = state
            .partial_solution
            .term_intersection_for_package(&next)
            .expect("a package was chosen but we don't have a term.");
        let v = match decision.1 {
            None => {
                // No version of `next` fits: record that fact and re-propagate.
                let inc = Incompatibility::no_versions(next.clone(), term_intersection.clone());
                state.add_incompatibility(inc);
                continue;
            }
            Some(x) => x,
        };
        // The provider broke its contract: the chosen version must satisfy
        // the constraint it was handed.
        if !term_intersection.contains(&v) {
            return Err(PubGrubError::ErrorChoosingPackageVersion(
                "choose_package_version picked an incompatible version".into(),
            ));
        }
        // `insert` returns true only the first time this (package, version)
        // is decided; on re-decisions the dependencies are already in state.
        if added_dependencies
            .entry(next.clone())
            .or_default()
            .insert(v.clone())
        {
            // Retrieve that package dependencies.
            let p = &next;
            let dependencies =
                match dependency_provider
                    .get_dependencies(&p, &v)
                    .map_err(|err| PubGrubError::ErrorRetrievingDependencies {
                        package: p.clone(),
                        version: v.clone(),
                        source: err,
                    })? {
                    Dependencies::Unknown => {
                        // Dependencies unavailable: mark this version unusable
                        // and go pick something else.
                        state.add_incompatibility(Incompatibility::unavailable_dependencies(
                            p.clone(),
                            v.clone(),
                        ));
                        continue;
                    }
                    Dependencies::Known(x) => {
                        // A package depending on itself, or on the empty set,
                        // is malformed input: fail loudly rather than loop.
                        if x.contains_key(&p) {
                            return Err(PubGrubError::SelfDependency {
                                package: p.clone(),
                                version: v.clone(),
                            });
                        }
                        if let Some((dependent, _)) = x.iter().find(|(_, r)| r == &&Range::none()) {
                            return Err(PubGrubError::DependencyOnTheEmptySet {
                                package: p.clone(),
                                version: v.clone(),
                                dependent: dependent.clone(),
                            });
                        }
                        x
                    }
                };
            // Add that package and version if the dependencies are not problematic.
            let dep_incompats =
                state.add_incompatibility_from_dependencies(p.clone(), v.clone(), &dependencies);
            // TODO: I don't think this check can actually happen.
            // We might want to put it under #[cfg(debug_assertions)].
            if state.incompatibility_store[dep_incompats.clone()]
                .iter()
                .any(|incompat| state.is_terminal(incompat))
            {
                // For a dependency incompatibility to be terminal,
                // it can only mean that root depend on not root?
                return Err(PubGrubError::Failure(
                    "Root package depends on itself at a different version?".into(),
                ));
            }
            state.partial_solution.add_version(
                p.clone(),
                v,
                dep_incompats,
                &state.incompatibility_store,
            );
        } else {
            // `dep_incompats` are already in `incompatibilities` so we know there are not satisfied
            // terms and can add the decision directly.
            state.partial_solution.add_decision(next.clone(), v);
        }
    }
}
/// An enum used by [DependencyProvider] that holds information about package dependencies.
/// For each [Package] there is a [Range] of concrete versions it allows as a dependency.
///
/// Note: `Unknown` is distinct from `Known` with an empty map — the latter
/// asserts the package has no dependencies (see [DependencyConstraints]).
#[derive(Clone)]
pub enum Dependencies<P: Package, V: Version> {
    /// Package dependencies are unavailable.
    Unknown,
    /// Container for all available package versions.
    Known(DependencyConstraints<P, V>),
}
/// Subtype of [Dependencies] which holds information about
/// all possible versions a given package can accept.
/// Each entry maps a dependency package to the range of versions accepted for it.
/// There is a difference in semantics between an empty [Map<P, Range<V>>](crate::type_aliases::Map)
/// inside [DependencyConstraints] and [Dependencies::Unknown]:
/// the former means the package has no dependencies and it is a known fact,
/// while the latter means they could not be fetched by [DependencyProvider].
pub type DependencyConstraints<P, V> = Map<P, Range<V>>;
/// Trait that allows the algorithm to retrieve available packages and their dependencies.
/// An implementor needs to be supplied to the [resolve] function.
pub trait DependencyProvider<P: Package, V: Version> {
    /// [Decision making](https://github.com/dart-lang/pub/blob/master/doc/solver.md#decision-making)
    /// is the process of choosing the next package
    /// and version that will be appended to the partial solution.
    /// Every time such a decision must be made,
    /// potential valid packages and version ranges are preselected by the resolver,
    /// and the dependency provider must choose.
    ///
    /// The strategy employed to choose such package and version
    /// cannot change the existence of a solution or not,
    /// but can drastically change the performances of the solver,
    /// or the properties of the solution.
    /// The documentation of Pub (PubGrub implementation for the dart programming language)
    /// states the following:
    ///
    /// > Pub chooses the latest matching version of the package
    /// > with the fewest versions that match the outstanding constraint.
    /// > This tends to find conflicts earlier if any exist,
    /// > since these packages will run out of versions to try more quickly.
    /// > But there's likely room for improvement in these heuristics.
    ///
    /// A helper function [choose_package_with_fewest_versions] is provided to ease
    /// implementations of this method if you can produce an iterator
    /// of the available versions in preference order for any package.
    ///
    /// Note: the type `T` ensures that this returns an item from the `packages` argument.
    fn choose_package_version<T: Borrow<P>, U: Borrow<Range<V>>>(
        &self,
        potential_packages: impl Iterator<Item = (T, U)>,
    ) -> Result<(T, Option<V>), Box<dyn Error + Send + Sync>>;
    /// Retrieves the package dependencies.
    /// Return [Dependencies::Unknown] if its dependencies are unknown.
    fn get_dependencies(
        &self,
        package: &P,
        version: &V,
    ) -> Result<Dependencies<P, V>, Box<dyn Error + Send + Sync>>;
    /// This is called fairly regularly during the resolution,
    /// if it returns an Err then resolution will be terminated.
    /// This is helpful if you want to add some form of early termination like a timeout,
    /// or you want to add some form of user feedback if things are taking a while.
    /// If not provided the resolver will run as long as needed.
    fn should_cancel(&self) -> Result<(), Box<dyn Error + Send + Sync>> {
        // Default implementation: never cancel.
        Ok(())
    }
}
/// This is a helper function to make it easy to implement
/// [DependencyProvider::choose_package_version].
/// It takes a function `list_available_versions` that takes a package and returns an iterator
/// of the available versions in preference order.
/// The helper finds the package from the `packages` argument with the fewest versions from
/// `list_available_versions` contained in the constraints. Then takes that package and finds the
/// first version contained in the constraints.
pub fn choose_package_with_fewest_versions<P: Package, V: Version, T, U, I, F>(
    list_available_versions: F,
    potential_packages: impl Iterator<Item = (T, U)>,
) -> (T, Option<V>)
where
    T: Borrow<P>,
    U: Borrow<Range<V>>,
    I: Iterator<Item = V>,
    F: Fn(&P) -> I,
{
    // How many of a package's available versions satisfy its constraint?
    let matching_count = |(package, constraint): &(T, U)| {
        let constraint = constraint.borrow();
        list_available_versions(package.borrow())
            .filter(|v| constraint.contains(v.borrow()))
            .count()
    };
    // Pick the package with the fewest candidates (fail-fast heuristic)...
    let (pkg, range) = potential_packages
        .min_by_key(matching_count)
        .expect("potential_packages gave us an empty iterator");
    // ...then take its first acceptable version in preference order.
    let version = list_available_versions(pkg.borrow())
        .find(|v| range.borrow().contains(v.borrow()));
    (pkg, version)
}
/// A basic implementation of [DependencyProvider].
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct OfflineDependencyProvider<P: Package, V: Version> {
    // package -> version -> dependency constraints.
    // The inner BTreeMap keeps versions in sorted order.
    dependencies: Map<P, BTreeMap<V, DependencyConstraints<P, V>>>,
}
impl<P: Package, V: Version> OfflineDependencyProvider<P, V> {
    /// Creates an empty OfflineDependencyProvider with no dependencies.
    pub fn new() -> Self {
        Self {
            dependencies: Map::default(),
        }
    }
    /// Registers the dependencies of a package and version pair.
    /// Dependencies must be added with a single call to
    /// [add_dependencies](OfflineDependencyProvider::add_dependencies).
    /// All subsequent calls to
    /// [add_dependencies](OfflineDependencyProvider::add_dependencies) for a given
    /// package version pair will replace the dependencies by the new ones.
    ///
    /// The API does not allow to add dependencies one at a time to uphold an assumption that
    /// [OfflineDependencyProvider.get_dependencies(p, v)](OfflineDependencyProvider::get_dependencies)
    /// provides all dependencies of a given package (p) and version (v) pair.
    pub fn add_dependencies<I: IntoIterator<Item = (P, Range<V>)>>(
        &mut self,
        package: P,
        version: impl Into<V>,
        dependencies: I,
    ) {
        let constraints: DependencyConstraints<P, V> = dependencies.into_iter().collect();
        // `insert` replaces any previous entry for this exact version.
        self.dependencies
            .entry(package)
            .or_default()
            .insert(version.into(), constraints);
    }
    /// Lists packages that have been saved.
    pub fn packages(&self) -> impl Iterator<Item = &P> {
        self.dependencies.keys()
    }
    /// Lists versions of saved packages in sorted order.
    /// Returns [None] if no information is available regarding that package.
    pub fn versions(&self, package: &P) -> Option<impl Iterator<Item = &V>> {
        Some(self.dependencies.get(package)?.keys())
    }
    /// Lists dependencies of a given package and version.
    /// Returns [None] if no information is available regarding that package and version pair.
    fn dependencies(&self, package: &P, version: &V) -> Option<DependencyConstraints<P, V>> {
        self.dependencies
            .get(package)
            .and_then(|versions| versions.get(version))
            .cloned()
    }
}
/// An implementation of [DependencyProvider] that
/// contains all dependency information available in memory.
/// Packages are picked with the fewest versions contained in the constraints first.
/// Versions are picked with the newest versions first.
impl<P: Package, V: Version> DependencyProvider<P, V> for OfflineDependencyProvider<P, V> {
    fn choose_package_version<T: Borrow<P>, U: Borrow<Range<V>>>(
        &self,
        potential_packages: impl Iterator<Item = (T, U)>,
    ) -> Result<(T, Option<V>), Box<dyn Error + Send + Sync>> {
        // The closure enumerates a package's known versions newest-first:
        // BTreeMap keys are sorted ascending, hence the `rev`.
        Ok(choose_package_with_fewest_versions(
            |p| {
                self.dependencies
                    .get(p)
                    .into_iter()
                    .flat_map(|version_map| version_map.keys())
                    .rev()
                    .cloned()
            },
            potential_packages,
        ))
    }
    fn get_dependencies(
        &self,
        package: &P,
        version: &V,
    ) -> Result<Dependencies<P, V>, Box<dyn Error + Send + Sync>> {
        // A missing entry means "we don't know", not "no dependencies".
        Ok(self
            .dependencies(package, version)
            .map_or(Dependencies::Unknown, Dependencies::Known))
    }
}

212
vendor/pubgrub/src/term.rs vendored Normal file
View file

@ -0,0 +1,212 @@
// SPDX-License-Identifier: MPL-2.0
//! A term is the fundamental unit of operation of the PubGrub algorithm.
//! It is a positive or negative expression regarding a set of versions.
use crate::range::Range;
use crate::version::Version;
use std::fmt;
/// A positive or negative expression regarding a set of versions.
///
/// Positive and negative terms differ on how they treat the "no version
/// selected" case: a negative term is also true when nothing is selected.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Term<V: Version> {
    /// For example, "1.0.0 <= v < 2.0.0" is a positive expression
    /// that is evaluated true if a version is selected
    /// and comprised between version 1.0.0 and version 2.0.0.
    Positive(Range<V>),
    /// The term "not v < 3.0.0" is a negative expression
    /// that is evaluated true if a version is selected >= 3.0.0
    /// or if no version is selected at all.
    Negative(Range<V>),
}
/// Base methods.
impl<V: Version> Term<V> {
    /// The term satisfied by every version (negation of the empty range).
    pub(crate) fn any() -> Self {
        Self::Negative(Range::none())
    }
    /// The term satisfied by no version at all.
    pub(crate) fn empty() -> Self {
        Self::Positive(Range::none())
    }
    /// A positive term matching exactly the given version.
    pub(crate) fn exact(version: V) -> Self {
        Self::Positive(Range::exact(version))
    }
    /// True for positive terms, false for negative ones.
    pub(crate) fn is_positive(&self) -> bool {
        matches!(self, Self::Positive(_))
    }
    /// Logical negation of a term: the negated term always evaluates to
    /// the opposite of the original one.
    pub(crate) fn negate(&self) -> Self {
        match self {
            Self::Positive(range) => Self::Negative(range.clone()),
            Self::Negative(range) => Self::Positive(range.clone()),
        }
    }
    /// Evaluate this term for a concrete choice of version.
    pub(crate) fn contains(&self, v: &V) -> bool {
        match self {
            Self::Positive(range) => range.contains(v),
            Self::Negative(range) => !range.contains(v),
        }
    }
    /// Unwrap the range contained in a positive term.
    ///
    /// # Panics
    /// Panics when called on a negative term.
    pub(crate) fn unwrap_positive(&self) -> &Range<V> {
        match self {
            Self::Positive(range) => range,
            Self::Negative(_) => panic!("Negative term cannot unwrap positive range"),
        }
    }
}
/// Set operations with terms.
impl<V: Version> Term<V> {
    /// Intersection of two terms.
    /// The result is positive as soon as one of the operands is positive.
    pub(crate) fn intersection(&self, other: &Term<V>) -> Term<V> {
        match (self, other) {
            (Self::Positive(r1), Self::Positive(r2)) => Self::Positive(r1.intersection(r2)),
            (Self::Positive(r1), Self::Negative(r2)) => Self::Positive(r1.intersection(&r2.negate())),
            (Self::Negative(r1), Self::Positive(r2)) => Self::Positive(r1.negate().intersection(r2)),
            (Self::Negative(r1), Self::Negative(r2)) => Self::Negative(r1.union(r2)),
        }
    }
    /// Union of two terms, via De Morgan: a ∪ b = ¬(¬a ∩ ¬b).
    /// The result is negative as soon as one of the operands is negative.
    pub(crate) fn union(&self, other: &Term<V>) -> Term<V> {
        self.negate().intersection(&other.negate()).negate()
    }
    /// Subset check, exactly as for sets:
    /// t1 ⊆ t2 if and only if t1 ∩ t2 = t1.
    pub(crate) fn subset_of(&self, other: &Term<V>) -> bool {
        &self.intersection(other) == self
    }
}
/// Describe a relation between a set of terms S and another term t.
///
/// As a shorthand, we say that a term v
/// satisfies or contradicts a term t if {v} satisfies or contradicts it.
pub(crate) enum Relation {
    /// We say that a set of terms S "satisfies" a term t
    /// if t must be true whenever every term in S is true.
    Satisfied,
    /// Conversely, S "contradicts" t if t must be false
    /// whenever every term in S is true.
    Contradicted,
    /// If neither of these is true we say that S is "inconclusive" for t.
    Inconclusive,
}
/// Relation between terms.
impl<'a, V: 'a + Version> Term<V> {
    /// Check if a set of terms satisfies this term.
    ///
    /// A set of terms S "satisfies" a term t when t must be true
    /// whenever every term in S is true; equivalently, ⋂ S ⊆ t.
    #[cfg(test)]
    fn satisfied_by(&self, terms_intersection: &Term<V>) -> bool {
        terms_intersection.subset_of(self)
    }
    /// Check if a set of terms contradicts this term.
    ///
    /// A set of terms S "contradicts" a term t when t must be false
    /// whenever every term in S is true; equivalently, (⋂ S) ⋂ t = ∅.
    #[cfg(test)]
    fn contradicted_by(&self, terms_intersection: &Term<V>) -> bool {
        terms_intersection.intersection(self) == Self::empty()
    }
    /// Classify the relation of this term to the intersection of a set of
    /// other terms: satisfied, contradicted, or inconclusive.
    pub(crate) fn relation_with(&self, other_terms_intersection: &Term<V>) -> Relation {
        let combined = self.intersection(other_terms_intersection);
        if &combined == other_terms_intersection {
            Relation::Satisfied
        } else if combined == Self::empty() {
            Relation::Contradicted
        } else {
            Relation::Inconclusive
        }
    }
}
impl<V: Version> AsRef<Term<V>> for Term<V> {
    /// Identity `AsRef`, letting APIs accept both `Term<V>` and `&Term<V>`.
    fn as_ref(&self) -> &Term<V> {
        // `self` already has the right type; the previous `&self` produced
        // a `&&Term<V>` that only compiled through deref coercion
        // (clippy::needless_borrow).
        self
    }
}
// REPORT ######################################################################
impl<V: Version + fmt::Display> fmt::Display for Term<V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Positive(range) => write!(f, "{}", range),
Self::Negative(range) => write!(f, "Not ( {} )", range),
}
}
}
// TESTS #######################################################################
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::version::NumberVersion;
    use proptest::prelude::*;
    /// Proptest strategy producing arbitrary terms: a random range wrapped
    /// equally often in `Positive` or `Negative`.
    pub fn strategy() -> impl Strategy<Value = Term<NumberVersion>> {
        prop_oneof![
            crate::range::tests::strategy().prop_map(Term::Positive),
            crate::range::tests::strategy().prop_map(Term::Negative),
        ]
    }
    proptest! {
        // Testing relation --------------------------------
        #[test]
        fn relation_with(term1 in strategy(), term2 in strategy()) {
            // `relation_with` must agree with the set-theoretic definitions
            // implemented by `satisfied_by` and `contradicted_by`.
            match term1.relation_with(&term2) {
                Relation::Satisfied => assert!(term1.satisfied_by(&term2)),
                Relation::Contradicted => assert!(term1.contradicted_by(&term2)),
                Relation::Inconclusive => {
                    assert!(!term1.satisfied_by(&term2));
                    assert!(!term1.contradicted_by(&term2));
                }
            }
        }
    }
}

10
vendor/pubgrub/src/type_aliases.rs vendored Normal file
View file

@ -0,0 +1,10 @@
// SPDX-License-Identifier: MPL-2.0
//! Publicly exported type aliases.
/// Map implementation used by the library.
/// FxHashMap swaps SipHash for a faster non-cryptographic hasher —
/// presumably chosen for solver speed; keys here are package names and
/// versions, not attacker-controlled data.
pub type Map<K, V> = rustc_hash::FxHashMap<K, V>;
/// Concrete dependencies picked by the library during [resolve](crate::solver::resolve)
/// from [DependencyConstraints](crate::solver::DependencyConstraints)
pub type SelectedDependencies<P, V> = Map<P, V>;

260
vendor/pubgrub/src/version.rs vendored Normal file
View file

@ -0,0 +1,260 @@
// SPDX-License-Identifier: MPL-2.0
//! Traits and implementations to create and compare versions.
use std::fmt::{self, Debug, Display};
use std::str::FromStr;
use thiserror::Error;
/// Versions have a minimal version (a "0" version)
/// and are ordered such that every version has a next one.
pub trait Version: Clone + Ord + Debug + Display {
    /// Returns the lowest version.
    fn lowest() -> Self;
    /// Returns the next version, the smallest strictly higher version.
    /// Implementations must guarantee `self < self.bump()` with nothing
    /// in between.
    fn bump(&self) -> Self;
}
/// Type for semantic versions: major.minor.patch.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct SemanticVersion {
    // Field order matters: the derived `Ord` compares major first,
    // then minor, then patch.
    major: u32,
    minor: u32,
    patch: u32,
}
#[cfg(feature = "serde")]
impl serde::Serialize for SemanticVersion {
    /// Serialize as the "major.minor.patch" display string rather than
    /// as a struct, matching what `Deserialize` parses back via `FromStr`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // `to_string` goes through the `Display` impl; it replaces the
        // previous `format!("{}", self)`, which is the same thing written
        // less idiomatically (clippy would flag it).
        serializer.serialize_str(&self.to_string())
    }
}
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for SemanticVersion {
    /// Deserialize from a "major.minor.patch" string using the
    /// [FromStr] implementation.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;
        raw.parse().map_err(serde::de::Error::custom)
    }
}
// Constructors
impl SemanticVersion {
    /// Build a version from its three components: `major.minor.patch`.
    pub fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self { major, minor, patch }
    }
    /// Version 0.0.0.
    pub fn zero() -> Self {
        Self::new(0, 0, 0)
    }
    /// Version 1.0.0.
    pub fn one() -> Self {
        Self::new(1, 0, 0)
    }
    /// Version 2.0.0.
    pub fn two() -> Self {
        Self::new(2, 0, 0)
    }
}
// Convert a tuple (major, minor, patch) into a version.
impl From<(u32, u32, u32)> for SemanticVersion {
fn from(tuple: (u32, u32, u32)) -> Self {
let (major, minor, patch) = tuple;
Self::new(major, minor, patch)
}
}
// Convert a version into a tuple (major, minor, patch).
impl From<SemanticVersion> for (u32, u32, u32) {
fn from(v: SemanticVersion) -> Self {
(v.major, v.minor, v.patch)
}
}
// Bump versions.
impl SemanticVersion {
    /// Next patch release: increments patch, keeps major and minor.
    pub fn bump_patch(self) -> Self {
        Self { patch: self.patch + 1, ..self }
    }
    /// Next minor release: increments minor, resets patch to 0.
    pub fn bump_minor(self) -> Self {
        Self { minor: self.minor + 1, patch: 0, ..self }
    }
    /// Next major release: increments major, resets minor and patch to 0.
    pub fn bump_major(self) -> Self {
        Self::new(self.major + 1, 0, 0)
    }
}
/// Error creating [SemanticVersion] from [String].
#[derive(Error, Debug, PartialEq)]
pub enum VersionParseError {
    /// [SemanticVersion] must contain major, minor, patch versions.
    #[error("version {full_version} must contain 3 numbers separated by dot")]
    NotThreeParts {
        /// [SemanticVersion] that was being parsed.
        full_version: String,
    },
    /// Wrapper around [ParseIntError](core::num::ParseIntError).
    #[error("cannot parse '{version_part}' in '{full_version}' as u32: {parse_error}")]
    ParseIntError {
        /// [SemanticVersion] that was being parsed.
        full_version: String,
        /// A version part where parsing failed.
        version_part: String,
        /// A specific error resulted from parsing a part of the version as [u32].
        /// Stored stringified — presumably so this enum can derive `PartialEq`.
        parse_error: String,
    },
}
impl FromStr for SemanticVersion {
    type Err = VersionParseError;

    /// Parse a `"major.minor.patch"` string. Exactly three dot-separated
    /// `u32` components are required; anything else is `NotThreeParts`,
    /// and a non-numeric or overflowing component is `ParseIntError`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parse one component, attaching context for the error message.
        let to_component = |part: &str| {
            part.parse::<u32>().map_err(|e| Self::Err::ParseIntError {
                full_version: s.to_string(),
                version_part: part.to_string(),
                parse_error: e.to_string(),
            })
        };
        let mut pieces = s.split('.');
        // The fourth `next()` must be `None`: "1.2.3.4" (or "1.2.3.")
        // is rejected, not truncated.
        if let (Some(major), Some(minor), Some(patch), None) =
            (pieces.next(), pieces.next(), pieces.next(), pieces.next())
        {
            Ok(Self::new(
                to_component(major)?,
                to_component(minor)?,
                to_component(patch)?,
            ))
        } else {
            Err(Self::Err::NotThreeParts {
                full_version: s.to_string(),
            })
        }
    }
}
#[test]
fn from_str_for_semantic_version() {
    let parse = |str: &str| str.parse::<SemanticVersion>();
    // Round-trip: whatever Display produces must parse back successfully.
    assert!(parse(
        &SemanticVersion {
            major: 0,
            minor: 1,
            patch: 0
        }
        .to_string()
    )
    .is_ok());
    assert!(parse("1.2.3").is_ok());
    // Non-numeric component.
    assert_eq!(
        parse("1.abc.3"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.abc.3".to_owned(),
            version_part: "abc".to_owned(),
            parse_error: "invalid digit found in string".to_owned(),
        })
    );
    // Negative numbers: '-' is not a valid digit for u32.
    assert_eq!(
        parse("1.2.-3"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.2.-3".to_owned(),
            version_part: "-3".to_owned(),
            parse_error: "invalid digit found in string".to_owned(),
        })
    );
    // Component overflowing u32.
    assert_eq!(
        parse("1.2.9876543210"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.2.9876543210".to_owned(),
            version_part: "9876543210".to_owned(),
            parse_error: "number too large to fit in target type".to_owned(),
        })
    );
    // Too few parts.
    assert_eq!(
        parse("1.2"),
        Err(VersionParseError::NotThreeParts {
            full_version: "1.2".to_owned(),
        })
    );
    // A trailing dot makes a fourth (empty) part, which is also rejected.
    assert_eq!(
        parse("1.2.3."),
        Err(VersionParseError::NotThreeParts {
            full_version: "1.2.3.".to_owned(),
        })
    );
}
impl Display for SemanticVersion {
    /// Render as "major.minor.patch".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { major, minor, patch } = self;
        write!(f, "{}.{}.{}", major, minor, patch)
    }
}
// Implement Version for SemanticVersion.
impl Version for SemanticVersion {
    /// 0.0.0 is the smallest semantic version.
    fn lowest() -> Self {
        Self::zero()
    }
    /// The immediate successor of a semantic version is the next patch
    /// release — nothing can sit between v and v with patch + 1.
    fn bump(&self) -> Self {
        self.bump_patch()
    }
}
/// Simplest versions possible, just a positive number.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize,))]
#[cfg_attr(feature = "serde", serde(transparent))]
// `serde(transparent)`: serializes as the bare number, not a 1-tuple.
pub struct NumberVersion(pub u32);
// Convert an usize into a version.
impl From<u32> for NumberVersion {
fn from(v: u32) -> Self {
Self(v)
}
}
// Convert a version into an usize.
impl From<NumberVersion> for u32 {
fn from(version: NumberVersion) -> Self {
version.0
}
}
impl Display for NumberVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl Version for NumberVersion {
fn lowest() -> Self {
Self(0)
}
fn bump(&self) -> Self {
Self(self.0 + 1)
}
}

File diff suppressed because it is too large Load diff

192
vendor/pubgrub/tests/examples.rs vendored Normal file
View file

@ -0,0 +1,192 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::range::Range;
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::type_aliases::Map;
use pubgrub::version::{NumberVersion, SemanticVersion};
#[test]
/// https://github.com/dart-lang/pub/blob/master/doc/solver.md#no-conflicts
fn no_conflict() {
    let mut dependency_provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();
    // root 1.0.0 depends on foo ^1.0.0, and foo 1.0.0 on bar ^1.0.0.
    dependency_provider.add_dependencies(
        "root",
        (1, 0, 0),
        vec![("foo", Range::between((1, 0, 0), (2, 0, 0)))],
    );
    dependency_provider.add_dependencies(
        "foo",
        (1, 0, 0),
        vec![("bar", Range::between((1, 0, 0), (2, 0, 0)))],
    );
    dependency_provider.add_dependencies("bar", (1, 0, 0), vec![]);
    dependency_provider.add_dependencies("bar", (2, 0, 0), vec![]);
    // Run the algorithm.
    let computed_solution = resolve(&dependency_provider, "root", (1, 0, 0)).unwrap();
    // The only valid solution: bar 2.0.0 is excluded by foo's constraint.
    let expected_solution: Map<&str, SemanticVersion> = [
        ("root", (1, 0, 0).into()),
        ("foo", (1, 0, 0).into()),
        ("bar", (1, 0, 0).into()),
    ]
    .into_iter()
    .collect();
    assert_eq!(expected_solution, computed_solution);
}
#[test]
/// https://github.com/dart-lang/pub/blob/master/doc/solver.md#avoiding-conflict-during-decision-making
fn avoiding_conflict_during_decision_making() {
    let mut dependency_provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();
    // root 1.0.0 depends on foo ^1.0.0 and bar ^1.0.0.
    #[rustfmt::skip]
    dependency_provider.add_dependencies(
        "root", (1, 0, 0),
        vec![
            ("foo", Range::between((1, 0, 0), (2, 0, 0))),
            ("bar", Range::between((1, 0, 0), (2, 0, 0))),
        ],
    );
    // foo 1.1.0 would force bar ^2.0.0, conflicting with root's bar ^1.0.0,
    // so the solver should avoid it during decision making.
    #[rustfmt::skip]
    dependency_provider.add_dependencies(
        "foo", (1, 1, 0),
        vec![("bar", Range::between((2, 0, 0), (3, 0, 0)))],
    );
    dependency_provider.add_dependencies("foo", (1, 0, 0), vec![]);
    dependency_provider.add_dependencies("bar", (1, 0, 0), vec![]);
    dependency_provider.add_dependencies("bar", (1, 1, 0), vec![]);
    dependency_provider.add_dependencies("bar", (2, 0, 0), vec![]);
    // Run the algorithm.
    let computed_solution = resolve(&dependency_provider, "root", (1, 0, 0)).unwrap();
    // Solution: foo falls back to 1.0.0; bar takes its newest matching 1.1.0.
    let mut expected_solution = Map::default();
    expected_solution.insert("root", (1, 0, 0).into());
    expected_solution.insert("foo", (1, 0, 0).into());
    expected_solution.insert("bar", (1, 1, 0).into());
    // Comparing the true solution with the one computed by the algorithm.
    assert_eq!(expected_solution, computed_solution);
}
#[test]
/// https://github.com/dart-lang/pub/blob/master/doc/solver.md#performing-conflict-resolution
fn conflict_resolution() {
    let mut dependency_provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();
    // root 1.0.0 depends on foo >=1.0.0.
    #[rustfmt::skip]
    dependency_provider.add_dependencies(
        "root", (1, 0, 0),
        vec![("foo", Range::higher_than((1, 0, 0)))],
    );
    // foo 2.0.0 needs bar ^1.0.0, but bar 1.0.0 needs foo ^1.0.0 —
    // picking foo 2.0.0 creates a conflict the solver must back out of.
    #[rustfmt::skip]
    dependency_provider.add_dependencies(
        "foo", (2, 0, 0),
        vec![("bar", Range::between((1, 0, 0), (2, 0, 0)))],
    );
    dependency_provider.add_dependencies("foo", (1, 0, 0), vec![]);
    #[rustfmt::skip]
    dependency_provider.add_dependencies(
        "bar", (1, 0, 0),
        vec![("foo", Range::between((1, 0, 0), (2, 0, 0)))],
    );
    // Run the algorithm.
    let computed_solution = resolve(&dependency_provider, "root", (1, 0, 0)).unwrap();
    // Solution: foo 1.0.0 alone; bar is never needed.
    let mut expected_solution = Map::default();
    expected_solution.insert("root", (1, 0, 0).into());
    expected_solution.insert("foo", (1, 0, 0).into());
    // Comparing the true solution with the one computed by the algorithm.
    assert_eq!(expected_solution, computed_solution);
}
#[test]
/// https://github.com/dart-lang/pub/blob/master/doc/solver.md#conflict-resolution-with-a-partial-satisfier
fn conflict_with_partial_satisfier() {
    let mut dependency_provider = OfflineDependencyProvider::<&str, SemanticVersion>::new();
    #[rustfmt::skip]
    // root 1.0.0 depends on foo ^1.0.0 and target ^2.0.0
    dependency_provider.add_dependencies(
        "root", (1, 0, 0),
        vec![
            ("foo", Range::between((1, 0, 0), (2, 0, 0))),
            ("target", Range::between((2, 0, 0), (3, 0, 0))),
        ],
    );
    #[rustfmt::skip]
    // foo 1.1.0 depends on left ^1.0.0 and right ^1.0.0
    dependency_provider.add_dependencies(
        "foo", (1, 1, 0),
        vec![
            ("left", Range::between((1, 0, 0), (2, 0, 0))),
            ("right", Range::between((1, 0, 0), (2, 0, 0))),
        ],
    );
    dependency_provider.add_dependencies("foo", (1, 0, 0), vec![]);
    #[rustfmt::skip]
    // left 1.0.0 depends on shared >=1.0.0
    dependency_provider.add_dependencies(
        "left", (1, 0, 0),
        vec![("shared", Range::higher_than((1, 0, 0)))],
    );
    #[rustfmt::skip]
    // right 1.0.0 depends on shared <2.0.0
    dependency_provider.add_dependencies(
        "right", (1, 0, 0),
        vec![("shared", Range::strictly_lower_than((2, 0, 0)))],
    );
    dependency_provider.add_dependencies("shared", (2, 0, 0), vec![]);
    #[rustfmt::skip]
    // shared 1.0.0 depends on target ^1.0.0
    dependency_provider.add_dependencies(
        "shared", (1, 0, 0),
        vec![("target", Range::between((1, 0, 0), (2, 0, 0)))],
    );
    dependency_provider.add_dependencies("target", (2, 0, 0), vec![]);
    dependency_provider.add_dependencies("target", (1, 0, 0), vec![]);
    // Run the algorithm.
    let computed_solution = resolve(&dependency_provider, "root", (1, 0, 0)).unwrap();
    // Solution: foo 1.1.0 must be abandoned — its left/right subtree pins
    // shared to 1.0.0, which needs target ^1.0.0, conflicting with root's
    // target ^2.0.0. foo 1.0.0 and target 2.0.0 remain.
    let mut expected_solution = Map::default();
    expected_solution.insert("root", (1, 0, 0).into());
    expected_solution.insert("foo", (1, 0, 0).into());
    expected_solution.insert("target", (2, 0, 0).into());
    // Comparing the true solution with the one computed by the algorithm.
    assert_eq!(expected_solution, computed_solution);
}
#[test]
/// a0 dep on b and c
/// b0 dep on d0
/// b1 dep on d1 (not existing)
/// c0 has no dep
/// c1 dep on d2 (not existing)
/// d0 has no dep
///
/// Solution: a0, b0, c0, d0
fn double_choices() {
    let mut dependency_provider = OfflineDependencyProvider::<&str, NumberVersion>::new();
    dependency_provider.add_dependencies("a", 0, vec![("b", Range::any()), ("c", Range::any())]);
    dependency_provider.add_dependencies("b", 0, vec![("d", Range::exact(0))]);
    dependency_provider.add_dependencies("b", 1, vec![("d", Range::exact(1))]);
    dependency_provider.add_dependencies("c", 0, vec![]);
    dependency_provider.add_dependencies("c", 1, vec![("d", Range::exact(2))]);
    dependency_provider.add_dependencies("d", 0, vec![]);
    // Run the algorithm: both b and c must back off their newest versions,
    // whose `d` requirements don't exist.
    let computed_solution = resolve(&dependency_provider, "a", 0).unwrap();
    // Solution.
    let expected_solution: Map<&str, NumberVersion> = [("a", 0), ("b", 0), ("c", 0), ("d", 0)]
        .into_iter()
        .map(|(package, version)| (package, version.into()))
        .collect();
    assert_eq!(expected_solution, computed_solution);
}

520
vendor/pubgrub/tests/proptest.rs vendored Normal file
View file

@ -0,0 +1,520 @@
// SPDX-License-Identifier: MPL-2.0
use std::{collections::BTreeSet as Set, error::Error};
use pubgrub::error::PubGrubError;
use pubgrub::package::Package;
use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{
choose_package_with_fewest_versions, resolve, Dependencies, DependencyProvider,
OfflineDependencyProvider,
};
use pubgrub::version::{NumberVersion, Version};
use proptest::collection::{btree_map, vec};
use proptest::prelude::*;
use proptest::sample::Index;
use proptest::string::string_regex;
use crate::sat_dependency_provider::SatResolve;
mod sat_dependency_provider;
/// The same as [OfflineDependencyProvider] but takes versions from the opposite end:
/// if [OfflineDependencyProvider] returns versions from newest to oldest, this returns them from oldest to newest.
#[derive(Clone)]
struct OldestVersionsDependencyProvider<P: Package, V: Version>(OfflineDependencyProvider<P, V>);
impl<P: Package, V: Version> DependencyProvider<P, V> for OldestVersionsDependencyProvider<P, V> {
    fn choose_package_version<T: std::borrow::Borrow<P>, U: std::borrow::Borrow<Range<V>>>(
        &self,
        potential_packages: impl Iterator<Item = (T, U)>,
    ) -> Result<(T, Option<V>), Box<dyn Error>> {
        Ok(choose_package_with_fewest_versions(
            // `versions` yields sorted ascending order, so (without the
            // `.rev()` the inner provider uses) this feeds the chooser
            // oldest-first.
            |p| self.0.versions(p).into_iter().flatten().cloned(),
            potential_packages,
        ))
    }
    // Dependency data is unchanged; only the version preference order differs.
    fn get_dependencies(&self, p: &P, v: &V) -> Result<Dependencies<P, V>, Box<dyn Error>> {
        self.0.get_dependencies(p, v)
    }
}
/// The same as DP but it has a timeout.
#[derive(Clone)]
struct TimeoutDependencyProvider<DP> {
    dp: DP,
    // Wall-clock start, checked against a 60-second budget in `should_cancel`.
    start_time: std::time::Instant,
    // Number of `should_cancel` calls so far; `Cell` because the
    // `DependencyProvider` API only hands out `&self`.
    call_count: std::cell::Cell<u64>,
    max_calls: u64,
}
impl<DP> TimeoutDependencyProvider<DP> {
    fn new(dp: DP, max_calls: u64) -> Self {
        Self {
            dp,
            start_time: std::time::Instant::now(),
            call_count: std::cell::Cell::new(0),
            max_calls,
        }
    }
}
impl<P: Package, V: Version, DP: DependencyProvider<P, V>> DependencyProvider<P, V>
    for TimeoutDependencyProvider<DP>
{
    fn choose_package_version<T: std::borrow::Borrow<P>, U: std::borrow::Borrow<Range<V>>>(
        &self,
        potential_packages: impl Iterator<Item = (T, U)>,
    ) -> Result<(T, Option<V>), Box<dyn Error>> {
        self.dp.choose_package_version(potential_packages)
    }
    fn get_dependencies(&self, p: &P, v: &V) -> Result<Dependencies<P, V>, Box<dyn Error>> {
        self.dp.get_dependencies(p, v)
    }
    // Aborts the test by panicking (not by returning `Err`) once the solver
    // has run for 60 seconds or called this hook `max_calls` times.
    fn should_cancel(&self) -> Result<(), Box<dyn Error>> {
        assert!(self.start_time.elapsed().as_secs() < 60);
        let calls = self.call_count.get();
        assert!(calls < self.max_calls);
        self.call_count.set(calls + 1);
        Ok(())
    }
}
#[test]
#[should_panic]
fn should_cancel_can_panic() {
    let mut dependency_provider = OfflineDependencyProvider::<_, NumberVersion>::new();
    // Package 0 depends on package 666, which has no published versions,
    // so the solver has to keep working (and keep invoking `should_cancel`).
    dependency_provider.add_dependencies(0, 0, vec![(666, Range::any())]);
    // With `max_calls == 1`, the call-count assertion inside
    // `should_cancel` must trip and panic.
    let bounded_provider = TimeoutDependencyProvider::new(dependency_provider, 1);
    let _ = resolve(&bounded_provider, 0, 0);
}
/// Strategy for short alphanumeric package names, excluding two reserved
/// ones: "root" is the package under resolution (confusing to also have in
/// the index) and "bad" is the deliberately unresolvable dependency used
/// by `registry_strategy`.
fn string_names() -> impl Strategy<Value = String> {
    string_regex("[A-Za-z][A-Za-z0-9_-]{0,5}")
        .unwrap()
        .prop_filter("reserved names", |n| !matches!(n.as_str(), "root" | "bad"))
}
/// This generates a random registry index.
/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..)
/// This strategy has a high probability of having valid dependencies
pub fn registry_strategy<N: Package + Ord>(
    name: impl Strategy<Value = N>,
    bad_name: N,
) -> impl Strategy<
    Value = (
        OfflineDependencyProvider<N, NumberVersion>,
        Vec<(N, NumberVersion)>,
    ),
> {
    let max_crates = 40;
    let max_versions = 15;
    let shrinkage = 40;
    let complicated_len = 10usize;
    // If this is false than the crate will depend on the nonexistent "bad"
    // instead of the complex set we generated for it.
    let allow_deps = prop::bool::weighted(0.99);
    let a_version = ..(max_versions as u32);
    // Per crate: a map version -> allow_deps flag, flattened to a Vec.
    let list_of_versions = btree_map(a_version, allow_deps, 1..=max_versions)
        .prop_map(move |ver| ver.into_iter().collect::<Vec<_>>());
    let list_of_crates_with_versions = btree_map(name, list_of_versions, 1..=max_crates);
    // each version of each crate can depend on each crate smaller then it.
    // In theory shrinkage should be 2, but in practice we get better trees with a larger value.
    let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
    let raw_version_range = (any::<Index>(), any::<Index>());
    let raw_dependency = (any::<Index>(), any::<Index>(), raw_version_range);
    // Map two proptest indices into an ordered (low, high) pair within `size`.
    fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
        use std::cmp::{max, min};
        let (a, b) = (a.index(size), b.index(size));
        (min(a, b), max(a, b))
    }
    let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
    // By default a package depends only on other packages that have a smaller name,
    // this helps make sure that all things in the resulting index are DAGs.
    // If this is true then the DAG is maintained with grater instead.
    let reverse_alphabetical = any::<bool>().no_shrink();
    (
        list_of_crates_with_versions,
        list_of_raw_dependency,
        reverse_alphabetical,
        1..(complicated_len + 1),
    )
        .prop_map(
            move |(crate_vers_by_name, raw_dependencies, reverse_alphabetical, complicated_len)| {
                // Flatten to one entry per (name, version); `None` deps mean
                // "depend on the unresolvable bad_name" (see loop at the end).
                let mut list_of_pkgid: Vec<(
                    (N, NumberVersion),
                    Option<Vec<(N, Range<NumberVersion>)>>,
                )> = crate_vers_by_name
                    .iter()
                    .flat_map(|(name, vers)| {
                        vers.iter().map(move |x| {
                            (
                                (name.clone(), NumberVersion::from(x.0)),
                                if x.1 { Some(vec![]) } else { None },
                            )
                        })
                    })
                    .collect();
                let len_all_pkgid = list_of_pkgid.len();
                for (a, b, (c, d)) in raw_dependencies {
                    // Orient each raw edge so the depended-on package (a) is
                    // on the DAG-preserving side of the dependent (b).
                    let (a, b) = order_index(a, b, len_all_pkgid);
                    let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
                    let ((dep_name, _), _) = list_of_pkgid[a].to_owned();
                    // No self-dependencies.
                    if (list_of_pkgid[b].0).0 == dep_name {
                        continue;
                    }
                    let s = &crate_vers_by_name[&dep_name];
                    let s_last_index = s.len() - 1;
                    let (c, d) = order_index(c, d, s.len());
                    if let (_, Some(deps)) = &mut list_of_pkgid[b] {
                        deps.push((
                            dep_name,
                            // Choose the tightest range shape that covers
                            // versions s[c]..=s[d].
                            if c == 0 && d == s_last_index {
                                Range::any()
                            } else if c == 0 {
                                Range::strictly_lower_than(s[d].0 + 1)
                            } else if d == s_last_index {
                                Range::higher_than(s[c].0)
                            } else if c == d {
                                Range::exact(s[c].0)
                            } else {
                                Range::between(s[c].0, s[d].0 + 1)
                            },
                        ))
                    }
                }
                let mut dependency_provider = OfflineDependencyProvider::<N, NumberVersion>::new();
                let complicated_len = std::cmp::min(complicated_len, list_of_pkgid.len());
                // The "complicated" cases are taken from the end of the DAG
                // (the most-depending side), so resolving them exercises
                // deep dependency trees.
                let complicated: Vec<_> = if reverse_alphabetical {
                    &list_of_pkgid[..complicated_len]
                } else {
                    &list_of_pkgid[(list_of_pkgid.len() - complicated_len)..]
                }
                .iter()
                .map(|(x, _)| (x.0.clone(), x.1))
                .collect();
                for ((name, ver), deps) in list_of_pkgid {
                    dependency_provider.add_dependencies(
                        name,
                        ver,
                        deps.unwrap_or_else(|| vec![(bad_name.clone(), Range::any())]),
                    );
                }
                (dependency_provider, complicated)
            },
        )
}
/// Ensures that generator makes registries with large dependency trees.
#[test]
fn meta_test_deep_trees_from_strategy() {
    use proptest::strategy::ValueTree;
    use proptest::test_runner::TestRunner;
    // Histogram of solution sizes: bucket k counts solutions of k + 1
    // packages (clamped to the last bucket); bucket 0 also counts failures.
    let mut dis = [0; 21];
    let strategy = registry_strategy(0u16..665, 666);
    // Deterministic RNG so this meta-test is reproducible run to run.
    let mut test_runner = TestRunner::deterministic();
    for _ in 0..128 {
        let (dependency_provider, cases) = strategy
            .new_tree(&mut TestRunner::new_with_rng(
                Default::default(),
                test_runner.new_rng(),
            ))
            .unwrap()
            .current();
        for (name, ver) in cases {
            let res = resolve(&dependency_provider, name, ver);
            dis[res
                .as_ref()
                .map(|x| std::cmp::min(x.len(), dis.len()) - 1)
                .unwrap_or(0)] += 1;
            // Succeed as soon as every bucket has been observed at least once.
            if dis.iter().all(|&x| x > 0) {
                return;
            }
        }
    }
    panic!(
        "In {} tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
        dis.iter().sum::<i32>(),
        dis
    );
}
proptest! {
    #![proptest_config(ProptestConfig {
        max_shrink_iters:
            if std::env::var("CI").is_ok() {
                // This attempts to make sure that CI will fail fast,
                0
            } else {
                // but that local builds will give a small clear test case.
                2048
            },
        result_cache: prop::test_runner::basic_result_cache,
        .. ProptestConfig::default()
    })]
    #[test]
    /// This test is mostly for profiling.
    fn prop_passes_string(
        (dependency_provider, cases) in registry_strategy(string_names(), "bad".to_owned())
    ) {
        for (name, ver) in cases {
            let _ = resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver);
        }
    }
    #[test]
    /// This test is mostly for profiling.
    fn prop_passes_int(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666)
    ) {
        for (name, ver) in cases {
            let _ = resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver);
        }
    }
    #[test]
    // Cross-check against an independent SAT-solver model: both must agree
    // on solvability, and every PubGrub solution must satisfy the SAT model.
    fn prop_sat_errors_the_same(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666)
    ) {
        let mut sat = SatResolve::new(&dependency_provider);
        for (name, ver) in cases {
            if let Ok(s) = resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver) {
                prop_assert!(sat.sat_is_valid_solution(&s));
            } else {
                prop_assert!(!sat.sat_resolve(&name, &ver));
            }
        }
    }
    #[test]
    /// This tests whether the algorithm is still deterministic.
    fn prop_same_on_repeated_runs(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666)
    ) {
        for (name, ver) in cases {
            let one = resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver);
            for _ in 0..3 {
                // For failures, compare the rendered derivation reports:
                // the full error trees are not directly comparable.
                match (&one, &resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver)) {
                    (Ok(l), Ok(r)) => assert_eq!(l, r),
                    (Err(PubGrubError::NoSolution(derivation_l)), Err(PubGrubError::NoSolution(derivation_r))) => {
                        prop_assert_eq!(
                            DefaultStringReporter::report(&derivation_l),
                            DefaultStringReporter::report(&derivation_r)
                        )},
                    _ => panic!("not the same result")
                }
            }
        }
    }
    #[test]
    /// [ReverseDependencyProvider] changes what order the candidates
    /// are tried but not the existence of a solution.
    fn prop_reversed_version_errors_the_same(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666)
    ) {
        let reverse_provider = OldestVersionsDependencyProvider(dependency_provider.clone());
        for (name, ver) in cases {
            let l = resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver);
            let r = resolve(&TimeoutDependencyProvider::new(reverse_provider.clone(), 50_000), name, ver);
            match (&l, &r) {
                (Ok(_), Ok(_)) => (),
                (Err(_), Err(_)) => (),
                _ => panic!("not the same result")
            }
        }
    }
    #[test]
    // Removing dependency edges can only make resolution easier; it must
    // never turn a solvable case into an unsolvable one.
    fn prop_removing_a_dep_cant_break(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666),
        indexes_to_remove in prop::collection::vec((any::<prop::sample::Index>(), any::<prop::sample::Index>(), any::<prop::sample::Index>()), 1..10)
    ) {
        let packages: Vec<_> = dependency_provider.packages().collect();
        let mut removed_provider = dependency_provider.clone();
        // Each triple picks a (package, version, dependency) and re-registers
        // that pair without the chosen dependency.
        for (package_idx, version_idx, dep_idx) in indexes_to_remove {
            let package = package_idx.get(&packages);
            let versions: Vec<_> = dependency_provider
                .versions(package)
                .unwrap().collect();
            let version = version_idx.get(&versions);
            let dependencies: Vec<(u16, Range<NumberVersion>)> = match dependency_provider
                .get_dependencies(package, version)
                .unwrap()
            {
                Dependencies::Unknown => panic!(),
                Dependencies::Known(d) => d.into_iter().collect(),
            };
            if !dependencies.is_empty() {
                let dependency = dep_idx.get(&dependencies).0;
                removed_provider.add_dependencies(
                    **package,
                    **version,
                    dependencies.into_iter().filter(|x| x.0 != dependency),
                )
            }
        }
        for (name, ver) in cases {
            if resolve(
                &TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000),
                name,
                ver,
            )
            .is_ok()
            {
                prop_assert!(
                    resolve(
                        &TimeoutDependencyProvider::new(removed_provider.clone(), 50_000),
                        name,
                        ver
                    )
                    .is_ok(),
                    "full index worked for `{} = \"={}\"` but removing some deps broke it!",
                    name,
                    ver,
                )
            }
        }
    }
    #[test]
    // "Limited independence of irrelevant alternatives": unpublishing
    // versions a successful resolution did not use must not change the
    // outcome, and unpublishing anything cannot fix an unsolvable case.
    fn prop_limited_independence_of_irrelevant_alternatives(
        (dependency_provider, cases) in registry_strategy(0u16..665, 666),
        indexes_to_remove in prop::collection::vec(any::<prop::sample::Index>(), 1..10)
    ) {
        let all_versions: Vec<(u16, NumberVersion)> = dependency_provider
            .packages()
            .flat_map(|&p| {
                dependency_provider
                    .versions(&p)
                    .unwrap()
                    .map(move |v| (p, v.clone()))
            })
            .collect();
        let to_remove: Set<(_, _)> = indexes_to_remove.iter().map(|x| x.get(&all_versions)).cloned().collect();
        for (name, ver) in cases {
            match resolve(&TimeoutDependencyProvider::new(dependency_provider.clone(), 50_000), name, ver) {
                Ok(used) => {
                    // If resolution was successful, then unpublishing a version of a crate
                    // that was not selected should not change that.
                    let mut smaller_dependency_provider = OfflineDependencyProvider::<_, NumberVersion>::new();
                    for &(n, v) in &all_versions {
                        if used.get(&n) == Some(&v) // it was used
                           || to_remove.get(&(n, v)).is_none() // or it is not one to be removed
                        {
                            let deps = match dependency_provider.get_dependencies(&n, &v).unwrap() {
                                Dependencies::Unknown => panic!(),
                                Dependencies::Known(deps) => deps,
                            };
                            smaller_dependency_provider.add_dependencies(n, v, deps)
                        }
                    }
                    prop_assert!(
                        resolve(&TimeoutDependencyProvider::new(smaller_dependency_provider.clone(), 50_000), name, ver).is_ok(),
                        "unpublishing {:?} stopped `{} = \"={}\"` from working",
                        to_remove,
                        name,
                        ver
                    )
                }
                Err(_) => {
                    // If resolution was unsuccessful, then it should stay unsuccessful
                    // even if any version of a crate is unpublished.
                    let mut smaller_dependency_provider = OfflineDependencyProvider::<_, NumberVersion>::new();
                    for &(n, v) in &all_versions {
                        if to_remove.get(&(n, v)).is_none() // it is not one to be removed
                        {
                            let deps = match dependency_provider.get_dependencies(&n, &v).unwrap() {
                                Dependencies::Unknown => panic!(),
                                Dependencies::Known(deps) => deps,
                            };
                            smaller_dependency_provider.add_dependencies(n, v, deps)
                        }
                    }
                    prop_assert!(
                        resolve(&TimeoutDependencyProvider::new(smaller_dependency_provider.clone(), 50_000), name, ver).is_err(),
                        "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
                        name,
                        ver,
                        to_remove,
                    )
                }
            }
        }
    }
}
#[cfg(feature = "serde")]
#[test]
fn large_case() {
    // Checks the resolver against the SAT oracle for every (package, version)
    // pair in the registry: when the resolver finds a solution the oracle must
    // accept it, and when it fails the oracle must agree no solution exists.
    // (Extracted to a generic helper so both `.ron` type layouts share it.)
    fn assert_matches_sat<P, V>(dependency_provider: &OfflineDependencyProvider<P, V>)
    where
        P: pubgrub::package::Package,
        V: pubgrub::version::Version,
    {
        let mut sat = SatResolve::new(dependency_provider);
        for p in dependency_provider.packages() {
            for n in dependency_provider.versions(p).unwrap() {
                if let Ok(s) = resolve(dependency_provider, p.clone(), n.clone()) {
                    assert!(sat.sat_is_valid_solution(&s));
                } else {
                    assert!(!sat.sat_resolve(p, &n));
                }
            }
        }
    }

    for case in std::fs::read_dir("test-examples").unwrap() {
        let case = case.unwrap().path();
        let name = case.file_name().unwrap().to_string_lossy();
        eprintln!("{}", name);
        let data = std::fs::read_to_string(&case).unwrap();
        // The file name suffix encodes the (package, version) types of the case.
        if name.ends_with("u16_NumberVersion.ron") {
            let dependency_provider: OfflineDependencyProvider<u16, NumberVersion> =
                ron::de::from_str(&data).unwrap();
            assert_matches_sat(&dependency_provider);
        } else if name.ends_with("str_SemanticVersion.ron") {
            let dependency_provider: OfflineDependencyProvider<
                &str,
                pubgrub::version::SemanticVersion,
            > = ron::de::from_str(&data).unwrap();
            assert_matches_sat(&dependency_provider);
        }
    }
}

View file

@ -0,0 +1,148 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::package::Package;
use pubgrub::solver::{Dependencies, DependencyProvider, OfflineDependencyProvider};
use pubgrub::type_aliases::{Map, SelectedDependencies};
use pubgrub::version::Version;
use varisat::ExtendFormula;
/// Number of bits in the in-memory representation of `T`.
const fn num_bits<T>() -> usize {
    8 * std::mem::size_of::<T>()
}
/// Number of binary digits needed to represent `x` (i.e. `floor(log2(x)) + 1`),
/// with `log_bits(0) == 0`.
///
/// Used below to size the bit-vector of the binary at-most-one encoding.
fn log_bits(x: usize) -> usize {
    if x == 0 {
        return 0;
    }
    // The original `assert!(x > 0)` here was dead code: `x` is unsigned and the
    // zero case already returned above, so the assert could never fire.
    // `leading_zeros` counts the unset high bits; subtracting it from the total
    // bit width leaves exactly the number of significant bits in `x`.
    (num_bits::<usize>() as u32 - x.leading_zeros()) as usize
}
/// Adds clauses to `solver` enforcing that at most one variable in `vars` is true.
///
/// Small inputs get the naive pairwise encoding; larger inputs use the binary
/// ("bitwise") encoding, which only needs O(n log n) clauses.
fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
    match vars {
        // Zero or one variable: nothing to constrain.
        [] | [_] => {}
        // For two or three variables the pairwise encoding is smallest.
        [a, b] => {
            solver.add_clause(&[a.negative(), b.negative()]);
        }
        [a, b, c] => {
            solver.add_clause(&[a.negative(), b.negative()]);
            solver.add_clause(&[a.negative(), c.negative()]);
            solver.add_clause(&[b.negative(), c.negative()]);
        }
        _ => {
            // use the "Binary Encoding" from
            // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
            // Each true variable forces the auxiliary bits to spell out its own
            // index, so two true variables would demand two different patterns.
            let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
            for (index, var) in vars.iter().enumerate() {
                for (bit_pos, &bit) in bits.iter().enumerate() {
                    solver.add_clause(&[var.negative(), bit.lit(((1 << bit_pos) & index) > 0)]);
                }
            }
        }
    }
}
/// Resolution can be reduced to the SAT problem. So this is an alternative implementation
/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read,
/// as compared to the real resolver. This will find a valid resolution if one exists.
///
/// The SAT library does not optimize for the newer version,
/// so the selected packages may not match the real resolver.
pub struct SatResolve<P: Package, V: Version> {
// Incremental solver holding the encoded registry; queries only add assumptions.
solver: varisat::Solver<'static>,
// For every package, each known version paired with the SAT variable that is
// true exactly when that version is selected.
all_versions_by_p: Map<P, Vec<(V, varisat::Var)>>,
}
impl<P: Package, V: Version> SatResolve<P, V> {
/// Encodes the whole registry held by `dp` as a CNF formula and primes the solver.
///
/// Clauses added:
/// - per package: at most one of its versions may be selected;
/// - per (package, version): if it is selected, every dependency range must be
///   matched by some selected version of the depended-on package.
pub fn new(dp: &OfflineDependencyProvider<P, V>) -> Self {
let mut cnf = varisat::CnfFormula::new();
let mut all_versions = vec![];
let mut all_versions_by_p: Map<P, Vec<(V, varisat::Var)>> = Map::default();
// One fresh SAT variable per (package, version).
for p in dp.packages() {
let mut versions_for_p = vec![];
for v in dp.versions(p).unwrap() {
let new_var = cnf.new_var();
all_versions.push((p.clone(), v.clone(), new_var));
versions_for_p.push(new_var);
all_versions_by_p
.entry(p.clone())
.or_default()
.push((v.clone(), new_var));
}
// no two versions of the same package
sat_at_most_one(&mut cnf, &versions_for_p);
}
// active packages need each of their `deps` to be satisfied
for (p, v, var) in &all_versions {
let deps = match dp.get_dependencies(p, v).unwrap() {
// NOTE(review): presumably the offline provider always knows the
// dependencies of versions it stores — confirm against its docs.
Dependencies::Unknown => panic!(),
Dependencies::Known(d) => d,
};
for (p1, range) in &deps {
let empty_vec = vec![];
let mut matches: Vec<varisat::Lit> = all_versions_by_p
.get(&p1)
.unwrap_or(&empty_vec)
.iter()
.filter(|(v1, _)| range.contains(v1))
.map(|(_, var1)| var1.positive())
.collect();
// ^ the `dep` is satisfied or
matches.push(var.negative());
// ^ `p` is not active
cnf.add_clause(&matches);
}
}
let mut solver = varisat::Solver::new();
solver.add_formula(&cnf);
// We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far.
// But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions.
solver
.solve()
.expect("docs say it can't error in default config");
Self {
solver,
all_versions_by_p,
}
}
/// Returns `true` if some resolution exists that selects exactly version `ver`
/// of package `name` (plus whatever else that entails).
pub fn sat_resolve(&mut self, name: &P, ver: &V) -> bool {
if let Some(vers) = self.all_versions_by_p.get(name) {
if let Some((_, var)) = vers.iter().find(|(v, _)| v == ver) {
// Assumptions are transient: they only constrain the next `solve` call.
self.solver.assume(&[var.positive()]);
self.solver
.solve()
.expect("docs say it can't error in default config")
} else {
// Unknown version of a known package.
false
}
} else {
// Unknown package.
false
}
}
/// Returns `true` if the concrete selection `pids` satisfies every clause of
/// the encoded registry, i.e. it is a valid solution.
pub fn sat_is_valid_solution(&mut self, pids: &SelectedDependencies<P, V>) -> bool {
let mut assumption = vec![];
// Pin every version variable — positive when selected in `pids`, negative
// otherwise — so the solver merely checks consistency instead of searching.
for (p, vs) in &self.all_versions_by_p {
for (v, var) in vs {
assumption.push(if pids.get(p) == Some(v) {
var.positive()
} else {
var.negative()
})
}
}
self.solver.assume(&assumption);
self.solver
.solve()
.expect("docs say it can't error in default config")
}
}

54
vendor/pubgrub/tests/tests.rs vendored Normal file
View file

@ -0,0 +1,54 @@
// SPDX-License-Identifier: MPL-2.0
use pubgrub::error::PubGrubError;
use pubgrub::range::Range;
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::NumberVersion;
#[test]
fn same_result_on_repeated_runs() {
    // A small registry with several satisfiable choices, so the resolver has
    // real decisions to make on every run.
    let mut provider = OfflineDependencyProvider::<_, NumberVersion>::new();
    provider.add_dependencies("c", 0, vec![]);
    provider.add_dependencies("c", 2, vec![]);
    provider.add_dependencies("b", 0, vec![]);
    provider.add_dependencies("b", 1, vec![("c", Range::between(0, 1))]);
    provider.add_dependencies("a", 0, vec![("b", Range::any()), ("c", Range::any())]);

    let (name, ver) = ("a", NumberVersion(0));
    // Resolution must be deterministic: every rerun reproduces the first answer.
    let first = resolve(&provider, name, ver);
    for _ in 0..10 {
        let again = resolve(&provider, name, ver);
        match (&first, &again) {
            (Ok(l), Ok(r)) => assert_eq!(l, r),
            _ => panic!("not the same result"),
        }
    }
}
#[test]
fn should_always_find_a_satisfier() {
    let mut provider = OfflineDependencyProvider::<_, NumberVersion>::new();
    // A dependency whose allowed range is empty can never be satisfied.
    provider.add_dependencies("a", 0, vec![("b", Range::none())]);
    let direct = resolve(&provider, "a", 0);
    assert!(matches!(
        direct,
        Err(PubGrubError::DependencyOnTheEmptySet { .. })
    ));

    // The same error must surface transitively through a dependent package.
    provider.add_dependencies("c", 0, vec![("a", Range::any())]);
    let transitive = resolve(&provider, "c", 0);
    assert!(matches!(
        transitive,
        Err(PubGrubError::DependencyOnTheEmptySet { .. })
    ));
}
#[test]
fn cannot_depend_on_self() {
    // A package depending on any version of itself is rejected outright.
    let mut provider = OfflineDependencyProvider::<_, NumberVersion>::new();
    provider.add_dependencies("a", 0, vec![("a", Range::any())]);
    let outcome = resolve(&provider, "a", 0);
    assert!(matches!(outcome, Err(PubGrubError::SelfDependency { .. })));
}