diff --git a/Cargo.lock b/Cargo.lock index f6a11d62d..00f97da23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5195,6 +5195,7 @@ dependencies = [ "mailparse", "regex", "rkyv", + "schemars", "serde", "serde-untagged", "thiserror", diff --git a/crates/uv-bench/benches/uv.rs b/crates/uv-bench/benches/uv.rs index 4713ea9c9..271319dd5 100644 --- a/crates/uv-bench/benches/uv.rs +++ b/crates/uv-bench/benches/uv.rs @@ -97,7 +97,7 @@ mod resolver { use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; use uv_platform_tags::{Arch, Os, Platform, Tags}; - use uv_pypi_types::ResolverMarkerEnvironment; + use uv_pypi_types::{ConflictingGroupList, ResolverMarkerEnvironment}; use uv_python::Interpreter; use uv_resolver::{ FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, RequiresPython, @@ -163,6 +163,7 @@ mod resolver { let options = OptionsBuilder::new().exclude_newer(exclude_newer).build(); let sources = SourceStrategy::default(); let dependency_metadata = DependencyMetadata::default(); + let conflicting_groups = ConflictingGroupList::empty(); let python_requirement = if universal { PythonRequirement::from_requires_python( @@ -208,6 +209,7 @@ mod resolver { options, &python_requirement, markers, + conflicting_groups, Some(&TAGS), &flat_index, &index, diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index c07f48f5e..18cf3e3d7 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -26,7 +26,7 @@ use uv_distribution_types::{ }; use uv_git::GitResolver; use uv_installer::{Installer, Plan, Planner, Preparer, SitePackages}; -use uv_pypi_types::Requirement; +use uv_pypi_types::{ConflictingGroupList, Requirement}; use uv_python::{Interpreter, PythonEnvironment}; use uv_resolver::{ ExcludeNewer, FlatIndex, Flexibility, InMemoryIndex, Manifest, OptionsBuilder, @@ -186,6 +186,9 @@ impl<'a> BuildContext for BuildDispatch<'a> { .build(), &python_requirement, ResolverEnvironment::specific(marker_env), + // Conflicting groups only make sense when doing + // universal resolution. + ConflictingGroupList::empty(), Some(tags), self.flat_index, self.index, diff --git a/crates/uv-pypi-types/Cargo.toml b/crates/uv-pypi-types/Cargo.toml index a89b29d4b..6e4701174 100644 --- a/crates/uv-pypi-types/Cargo.toml +++ b/crates/uv-pypi-types/Cargo.toml @@ -19,7 +19,7 @@ workspace = true uv-distribution-filename = { workspace = true } uv-fs = { workspace = true, features = ["serde"] } uv-git = { workspace = true } -uv-normalize = { workspace = true } +uv-normalize = { workspace = true, features = ["schemars"] } uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } @@ -29,6 +29,7 @@ jiff = { workspace = true, features = ["serde"] } mailparse = { workspace = true } regex = { workspace = true } rkyv = { workspace = true } +schemars = { workspace = true } serde = { workspace = true } serde-untagged = { workspace = true } thiserror = { workspace = true } diff --git a/crates/uv-pypi-types/src/conflicting_groups.rs b/crates/uv-pypi-types/src/conflicting_groups.rs new file mode 100644 index 000000000..3002d33a9 --- /dev/null +++ b/crates/uv-pypi-types/src/conflicting_groups.rs @@ -0,0 +1,301 @@ +use uv_normalize::{ExtraName, PackageName}; + +/// A list of conflicting groups pre-defined by an end user. +/// +/// This is useful to force the resolver to fork according to extras that have +/// unavoidable conflicts with each other. (The alternative is that resolution +/// will fail.) 
+#[derive(
+    Debug, Default, Clone, Eq, PartialEq, serde::Deserialize, serde::Serialize, schemars::JsonSchema,
+)]
+pub struct ConflictingGroupList(Vec<ConflictingGroups>);
+
+impl ConflictingGroupList {
+    /// Returns no conflicting groups.
+    ///
+    /// This results in no effect on resolution.
+    pub fn empty() -> ConflictingGroupList {
+        ConflictingGroupList::default()
+    }
+
+    /// Push a set of conflicting groups.
+    pub fn push(&mut self, groups: ConflictingGroups) {
+        self.0.push(groups);
+    }
+
+    /// Returns an iterator over all sets of conflicting groups.
+    pub fn iter(&self) -> impl Iterator<Item = &ConflictingGroups> + '_ {
+        self.0.iter()
+    }
+
+    /// Returns true if this conflicting group list contains any conflicting
+    /// group set that contains the given package and extra name pair.
+    pub fn contains(&self, package: &PackageName, extra: &ExtraName) -> bool {
+        self.iter().any(|groups| groups.contains(package, extra))
+    }
+
+    /// Returns true if this set of conflicting groups is empty.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Appends the given list to this one. This drains all elements
+    /// from the list given, such that after this call, it is empty.
+    pub fn append(&mut self, other: &mut ConflictingGroupList) {
+        self.0.append(&mut other.0);
+    }
+}
+
+/// A single set of package-extra pairs that conflict with one another.
+///
+/// Within each set of conflicting groups, the resolver should isolate
+/// the requirements corresponding to each extra from the requirements of
+/// other extras in this set. That is, the resolver should put each set of
+/// requirements in a different fork.
+///
+/// A `TryFrom<Vec<ConflictingGroup>>` impl may be used to build a set
+/// from a sequence. Note though that at least 2 groups are required.
+#[derive(Debug, Default, Clone, Eq, PartialEq, serde::Serialize, schemars::JsonSchema)]
+pub struct ConflictingGroups(Vec<ConflictingGroup>);
+
+impl ConflictingGroups {
+    /// Create a pair of groups that conflict with one another.
+    pub fn pair(group1: ConflictingGroup, group2: ConflictingGroup) -> ConflictingGroups {
+        ConflictingGroups(vec![group1, group2])
+    }
+
+    /// Add a new conflicting group to this set.
+    pub fn push(&mut self, group: ConflictingGroup) {
+        self.0.push(group);
+    }
+
+    /// Returns an iterator over all conflicting groups.
+    pub fn iter(&self) -> impl Iterator<Item = &ConflictingGroup> + '_ {
+        self.0.iter()
+    }
+
+    /// Returns true if this conflicting group contains the given
+    /// package and extra name pair.
+    pub fn contains(&self, package: &PackageName, extra: &ExtraName) -> bool {
+        self.iter()
+            .any(|group| group.package() == package && group.extra() == extra)
+    }
+}
+
+impl<'de> serde::Deserialize<'de> for ConflictingGroups {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let groups = Vec::<ConflictingGroup>::deserialize(deserializer)?;
+        Self::try_from(groups).map_err(serde::de::Error::custom)
+    }
+}
+
+impl TryFrom<Vec<ConflictingGroup>> for ConflictingGroups {
+    type Error = ConflictingGroupError;
+
+    fn try_from(groups: Vec<ConflictingGroup>) -> Result<ConflictingGroups, ConflictingGroupError> {
+        match groups.len() {
+            0 => return Err(ConflictingGroupError::ZeroGroups),
+            1 => return Err(ConflictingGroupError::OneGroup),
+            _ => {}
+        }
+        Ok(ConflictingGroups(groups))
+    }
+}
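For orientation, a minimal editorial sketch (not part of this diff) of how the API above composes; it assumes the `FromStr` impls that `uv_normalize` provides for `PackageName` and `ExtraName`, and the `ConflictingGroup` type defined just below.

use std::str::FromStr;

use uv_normalize::{ExtraName, PackageName};

fn example() -> Result<(), Box<dyn std::error::Error>> {
    let package = PackageName::from_str("myproject")?;
    let x1 = ExtraName::from_str("x1")?;
    let x2 = ExtraName::from_str("x2")?;

    // Declare that `myproject[x1]` and `myproject[x2]` may never be enabled together.
    let mut conflicting = ConflictingGroupList::empty();
    conflicting.push(ConflictingGroups::pair(
        ConflictingGroup::from((package.clone(), x1.clone())),
        ConflictingGroup::from((package.clone(), x2)),
    ));

    // The resolver can then ask whether a (package, extra) pair participates in any conflict.
    assert!(conflicting.contains(&package, &x1));
    Ok(())
}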
+/// A single item in a set of conflicting groups.
+///
+/// Each item is a pair of a package and a corresponding extra name for that
+/// package.
+#[derive(
+    Debug,
+    Default,
+    Clone,
+    Eq,
+    Hash,
+    PartialEq,
+    PartialOrd,
+    Ord,
+    serde::Deserialize,
+    serde::Serialize,
+    schemars::JsonSchema,
+)]
+pub struct ConflictingGroup {
+    package: PackageName,
+    extra: ExtraName,
+}
+
+impl ConflictingGroup {
+    /// Returns the package name of this conflicting group.
+    pub fn package(&self) -> &PackageName {
+        &self.package
+    }
+
+    /// Returns the extra name of this conflicting group.
+    pub fn extra(&self) -> &ExtraName {
+        &self.extra
+    }
+
+    /// Returns this group as a new type with its fields borrowed.
+    pub fn as_ref(&self) -> ConflictingGroupRef<'_> {
+        ConflictingGroupRef {
+            package: self.package(),
+            extra: self.extra(),
+        }
+    }
+}
+
+impl From<(PackageName, ExtraName)> for ConflictingGroup {
+    fn from((package, extra): (PackageName, ExtraName)) -> ConflictingGroup {
+        ConflictingGroup { package, extra }
+    }
+}
+
+/// A single item in a set of conflicting groups, by reference.
+///
+/// Each item is a pair of a package and a corresponding extra name for that
+/// package.
+#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq, PartialOrd, Ord)]
+pub struct ConflictingGroupRef<'a> {
+    package: &'a PackageName,
+    extra: &'a ExtraName,
+}
+
+impl<'a> ConflictingGroupRef<'a> {
+    /// Returns the package name of this conflicting group.
+    pub fn package(&self) -> &'a PackageName {
+        self.package
+    }
+
+    /// Returns the extra name of this conflicting group.
+    pub fn extra(&self) -> &'a ExtraName {
+        self.extra
+    }
+
+    /// Converts this borrowed conflicting group to its owned variant.
+    pub fn to_owned(&self) -> ConflictingGroup {
+        ConflictingGroup {
+            package: self.package().clone(),
+            extra: self.extra().clone(),
+        }
+    }
+}
+
+impl<'a> From<(&'a PackageName, &'a ExtraName)> for ConflictingGroupRef<'a> {
+    fn from((package, extra): (&'a PackageName, &'a ExtraName)) -> ConflictingGroupRef<'a> {
+        ConflictingGroupRef { package, extra }
+    }
+}
+
+/// An error that occurs when the given conflicting groups are invalid somehow.
+#[derive(Debug, thiserror::Error)]
+pub enum ConflictingGroupError {
+    /// An error for when there are zero conflicting groups.
+    #[error("Each set of conflicting groups must have at least two entries, but found none")]
+    ZeroGroups,
+    /// An error for when there is one conflicting group.
+    #[error("Each set of conflicting groups must have at least two entries, but found only one")]
+    OneGroup,
+}
+
+/// Like [`ConflictingGroupList`], but for deserialization in `pyproject.toml`.
+///
+/// The schema format is different from the in-memory format. Specifically, the
+/// schema format does not allow specifying the package name (or will make it
+/// optional in the future), whereas the in-memory format needs the package
+/// name.
+///
+/// N.B. `ConflictingGroupList` is still used for (de)serialization.
+/// Specifically, in the lock file, where the package name is required.
+#[derive(
+    Debug, Default, Clone, Eq, PartialEq, serde::Deserialize, serde::Serialize, schemars::JsonSchema,
+)]
+pub struct SchemaConflictingGroupList(Vec<SchemaConflictingGroups>);
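To make the two formats concrete, another minimal editorial sketch (not part of this diff): the `toml` dependency, the `ExampleToolUv` wrapper struct, and the `conflicting-groups` key are assumptions for illustration only, not uv's actual configuration surface.

#[derive(serde::Deserialize)]
struct ExampleToolUv {
    #[serde(rename = "conflicting-groups", default)]
    conflicting_groups: Option<SchemaConflictingGroupList>,
}

fn example(project: &PackageName) -> Result<(), Box<dyn std::error::Error>> {
    // Schema form: the package name is implicit (it is the enclosing project).
    let doc = r#"
        conflicting-groups = [
            [{ extra = "x1" }, { extra = "x2" }],
        ]
    "#;
    let tool: ExampleToolUv = toml::from_str(doc)?;
    // Resolved form: each entry is paired with the project's package name.
    let resolved = tool
        .conflicting_groups
        .unwrap_or_default()
        .to_conflicting_with_package_name(project);
    assert!(!resolved.is_empty());
    Ok(())
}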
+impl SchemaConflictingGroupList {
+    /// Convert the public schema "conflicting" type to our internal fully
+    /// resolved type. Effectively, this pairs the corresponding package name
+    /// with each conflict.
+    ///
+    /// If a conflict has an explicit package name (written by the end user),
+    /// then that takes precedence over the given package name, which is only
+    /// used when there is no explicit package name written.
+    pub fn to_conflicting_with_package_name(&self, package: &PackageName) -> ConflictingGroupList {
+        let mut conflicting = ConflictingGroupList::empty();
+        for tool_uv_set in &self.0 {
+            let mut set = vec![];
+            for item in &tool_uv_set.0 {
+                let package = item.package.clone().unwrap_or_else(|| package.clone());
+                set.push(ConflictingGroup::from((package, item.extra.clone())));
+            }
+            // OK because we guarantee that
+            // `SchemaConflictingGroupList` is valid and there aren't
+            // any new errors that can occur here.
+            let set = ConflictingGroups::try_from(set).unwrap();
+            conflicting.push(set);
+        }
+        conflicting
+    }
+}
+
+/// Like [`ConflictingGroups`], but for deserialization in `pyproject.toml`.
+///
+/// The schema format is different from the in-memory format. Specifically, the
+/// schema format does not allow specifying the package name (or will make it
+/// optional in the future), whereas the in-memory format needs the package
+/// name.
+#[derive(Debug, Default, Clone, Eq, PartialEq, serde::Serialize, schemars::JsonSchema)]
+pub struct SchemaConflictingGroups(Vec<SchemaConflictingGroup>);
+
+/// Like [`ConflictingGroup`], but for deserialization in `pyproject.toml`.
+///
+/// The schema format is different from the in-memory format. Specifically, the
+/// schema format does not allow specifying the package name (or will make it
+/// optional in the future), whereas the in-memory format needs the package
+/// name.
+#[derive(
+    Debug,
+    Default,
+    Clone,
+    Eq,
+    Hash,
+    PartialEq,
+    PartialOrd,
+    Ord,
+    serde::Deserialize,
+    serde::Serialize,
+    schemars::JsonSchema,
+)]
+#[serde(deny_unknown_fields)]
+pub struct SchemaConflictingGroup {
+    #[serde(default)]
+    package: Option<PackageName>,
+    extra: ExtraName,
+}
+
+impl<'de> serde::Deserialize<'de> for SchemaConflictingGroups {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let items = Vec::<SchemaConflictingGroup>::deserialize(deserializer)?;
+        Self::try_from(items).map_err(serde::de::Error::custom)
+    }
+}
+
+impl TryFrom<Vec<SchemaConflictingGroup>> for SchemaConflictingGroups {
+    type Error = ConflictingGroupError;
+
+    fn try_from(
+        items: Vec<SchemaConflictingGroup>,
+    ) -> Result<SchemaConflictingGroups, ConflictingGroupError> {
+        match items.len() {
+            0 => return Err(ConflictingGroupError::ZeroGroups),
+            1 => return Err(ConflictingGroupError::OneGroup),
+            _ => {}
+        }
+        Ok(SchemaConflictingGroups(items))
+    }
+}
diff --git a/crates/uv-pypi-types/src/lib.rs b/crates/uv-pypi-types/src/lib.rs
index ec9abbb44..2498140a8 100644
--- a/crates/uv-pypi-types/src/lib.rs
+++ b/crates/uv-pypi-types/src/lib.rs
@@ -1,4 +1,5 @@
 pub use base_url::*;
+pub use conflicting_groups::*;
 pub use direct_url::*;
 pub use lenient_requirement::*;
 pub use marker_environment::*;
@@ -10,6 +11,7 @@ pub use simple_json::*;
 pub use supported_environments::*;

 mod base_url;
+mod conflicting_groups;
 mod direct_url;
 mod lenient_requirement;
 mod marker_environment;
diff --git a/crates/uv-pypi-types/src/marker_environment.rs b/crates/uv-pypi-types/src/marker_environment.rs
index 849ccf251..72c474444 100644
--- a/crates/uv-pypi-types/src/marker_environment.rs
+++ b/crates/uv-pypi-types/src/marker_environment.rs
@@ -4,7 +4,7 @@ use uv_pep508::MarkerEnvironment;

 /// A wrapper type around [`MarkerEnvironment`] that ensures the Python version markers are
 /// release-only, to match the resolver's semantics.
-#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub struct ResolverMarkerEnvironment(MarkerEnvironment); impl ResolverMarkerEnvironment { diff --git a/crates/uv-resolver/src/candidate_selector.rs b/crates/uv-resolver/src/candidate_selector.rs index 44f919750..cd66e4fba 100644 --- a/crates/uv-resolver/src/candidate_selector.rs +++ b/crates/uv-resolver/src/candidate_selector.rs @@ -137,10 +137,14 @@ impl CandidateSelector { // first has the matching half and then the mismatching half. let preferences_match = preferences.get(package_name).filter(|(marker, _version)| { // `.unwrap_or(true)` because the universal marker is considered matching. - marker.map(|marker| env.included(marker)).unwrap_or(true) + marker + .map(|marker| env.included_by_marker(marker)) + .unwrap_or(true) }); let preferences_mismatch = preferences.get(package_name).filter(|(marker, _version)| { - marker.map(|marker| !env.included(marker)).unwrap_or(false) + marker + .map(|marker| !env.included_by_marker(marker)) + .unwrap_or(false) }); self.get_preferred_from_iter( preferences_match.chain(preferences_mismatch), diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index a9db58e7f..f3cbfbb9d 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -12,7 +12,7 @@ use tracing::trace; use uv_distribution_types::{ BuiltDist, IndexCapabilities, IndexLocations, IndexUrl, InstalledDist, SourceDist, }; -use uv_normalize::PackageName; +use uv_normalize::{ExtraName, PackageName}; use uv_pep440::{LocalVersionSlice, Version}; use uv_static::EnvVars; @@ -41,6 +41,13 @@ pub enum ResolveError { #[error("Attempted to wait on an unregistered task: `{_0}`")] UnregisteredTask(String), + #[error("Found conflicting extra `{extra}` unconditionally enabled in `{requirement}`")] + ConflictingExtra { + // Boxed because `Requirement` is large. + requirement: Box, + extra: ExtraName, + }, + #[error("Overrides contain conflicting URLs for package `{0}`:\n- {1}\n- {2}")] ConflictingOverrideUrls(PackageName, String, String), diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 205ad043c..f1e65ff64 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -40,7 +40,8 @@ use uv_pep440::Version; use uv_pep508::{split_scheme, MarkerEnvironment, MarkerTree, VerbatimUrl, VerbatimUrlError}; use uv_platform_tags::{TagCompatibility, TagPriority, Tags}; use uv_pypi_types::{ - redact_credentials, HashDigest, ParsedArchiveUrl, ParsedGitUrl, Requirement, RequirementSource, + redact_credentials, ConflictingGroupList, HashDigest, ParsedArchiveUrl, ParsedGitUrl, + Requirement, RequirementSource, }; use uv_types::{BuildContext, HashStrategy}; use uv_workspace::dependency_groups::DependencyGroupError; @@ -80,6 +81,8 @@ pub struct Lock { /// If this lockfile was built from a forking resolution with non-identical forks, store the /// forks in the lockfile so we can recreate them in subsequent resolutions. fork_markers: Vec, + /// The conflicting groups/extras specified by the user. + conflicting_groups: ConflictingGroupList, /// The list of supported environments specified by the user. supported_environments: Vec, /// The range of supported Python versions. 
@@ -236,6 +239,7 @@ impl Lock { requires_python, options, ResolverManifest::default(), + ConflictingGroupList::empty(), vec![], graph.fork_markers.clone(), )?; @@ -311,6 +315,7 @@ impl Lock { requires_python: RequiresPython, options: ResolverOptions, manifest: ResolverManifest, + conflicting_groups: ConflictingGroupList, supported_environments: Vec, fork_markers: Vec, ) -> Result { @@ -460,6 +465,7 @@ impl Lock { let lock = Self { version, fork_markers, + conflicting_groups, supported_environments, requires_python, options, @@ -477,6 +483,13 @@ impl Lock { self } + /// Record the conflicting groups that were used to generate this lock. + #[must_use] + pub fn with_conflicting_groups(mut self, conflicting_groups: ConflictingGroupList) -> Self { + self.conflicting_groups = conflicting_groups; + self + } + /// Record the supported environments that were used to generate this lock. #[must_use] pub fn with_supported_environments(mut self, supported_environments: Vec) -> Self { @@ -536,6 +549,11 @@ impl Lock { self.options.exclude_newer } + /// Returns the conflicting groups that were used to generate this lock. + pub fn conflicting_groups(&self) -> &ConflictingGroupList { + &self.conflicting_groups + } + /// Returns the supported environments that were used to generate this lock. pub fn supported_environments(&self) -> &[MarkerTree] { &self.supported_environments @@ -614,6 +632,19 @@ impl Lock { doc.insert("supported-markers", value(supported_environments)); } + if !self.conflicting_groups.is_empty() { + let mut list = Array::new(); + for groups in self.conflicting_groups.iter() { + list.push(each_element_on_its_line_array(groups.iter().map(|group| { + let mut table = InlineTable::new(); + table.insert("package", Value::from(group.package().to_string())); + table.insert("extra", Value::from(group.extra().to_string())); + table + }))); + } + doc.insert("conflicting-groups", value(list)); + } + // Write the settings that were used to generate the resolution. // This enables us to invalidate the lockfile if the user changes // their settings. @@ -1352,6 +1383,8 @@ struct LockWire { fork_markers: Vec, #[serde(rename = "supported-markers", default)] supported_environments: Vec, + #[serde(rename = "conflicting-groups", default)] + conflicting_groups: Option, /// We discard the lockfile if these options match. 
#[serde(default)] options: ResolverOptions, @@ -1403,6 +1436,8 @@ impl TryFrom for Lock { wire.requires_python, wire.options, wire.manifest, + wire.conflicting_groups + .unwrap_or_else(ConflictingGroupList::empty), supported_environments, fork_markers, )?; diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_missing.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_missing.snap index a17e02700..13f9c1b0a 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_missing.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_missing.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_present.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_present.snap index 65aaa5849..5f276e881 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_present.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_optional_present.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_required_present.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_required_present.snap index 09dc7df1f..194fc5ab7 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_required_present.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__hash_required_present.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_unambiguous.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_unambiguous.snap index e93ae3260..f8cae3434 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_unambiguous.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_unambiguous.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_version_unambiguous.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_version_unambiguous.snap index e93ae3260..f8cae3434 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_version_unambiguous.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_source_version_unambiguous.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: 
VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_version_unambiguous.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_version_unambiguous.snap index e93ae3260..f8cae3434 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_version_unambiguous.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__missing_dependency_version_unambiguous.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_has_subdir.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_has_subdir.snap index 8c7ca8d2c..7c0b43c63 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_has_subdir.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_has_subdir.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_no_subdir.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_no_subdir.snap index 04e3df0ba..894d52e40 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_no_subdir.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_direct_no_subdir.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_directory.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_directory.snap index 3b0a488c4..2a7316ed1 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_directory.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_directory.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_editable.snap b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_editable.snap index 2c8a20fd3..bb65d03ad 100644 --- a/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_editable.snap +++ b/crates/uv-resolver/src/lock/snapshots/uv_resolver__lock__tests__source_editable.snap @@ -6,6 +6,9 @@ Ok( Lock { version: 1, fork_markers: [], + conflicting_groups: ConflictingGroupList( + [], + ), supported_environments: [], requires_python: RequiresPython { specifiers: VersionSpecifiers( diff --git a/crates/uv-resolver/src/pubgrub/package.rs b/crates/uv-resolver/src/pubgrub/package.rs index 14debbcd6..9a6f4e4c2 100644 --- a/crates/uv-resolver/src/pubgrub/package.rs +++ b/crates/uv-resolver/src/pubgrub/package.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use uv_normalize::{ExtraName, GroupName, PackageName}; use 
uv_pep508::{MarkerTree, MarkerTreeContents}; +use uv_pypi_types::ConflictingGroupRef; use crate::python_requirement::PythonRequirement; @@ -166,6 +167,35 @@ impl PubGrubPackage { } } + /// Returns the extra name associated with this PubGrub package, if it has + /// one. + pub(crate) fn extra(&self) -> Option<&ExtraName> { + match &**self { + // A root can never be a dependency of another package, and a `Python` pubgrub + // package is never returned by `get_dependencies`. So these cases never occur. + PubGrubPackageInner::Root(_) + | PubGrubPackageInner::Python(_) + | PubGrubPackageInner::Package { extra: None, .. } + | PubGrubPackageInner::Dev { .. } + | PubGrubPackageInner::Marker { .. } => None, + PubGrubPackageInner::Package { + extra: Some(ref extra), + .. + } + | PubGrubPackageInner::Extra { ref extra, .. } => Some(extra), + } + } + + /// Extracts a possible conflicting group from this package. + /// + /// If this package can't possibly be classified as a conflicting group, + /// then this returns `None`. + pub(crate) fn conflicting_group(&self) -> Option> { + let package = self.name_no_root()?; + let extra = self.extra()?; + Some(ConflictingGroupRef::from((package, extra))) + } + /// Returns `true` if this PubGrub package is a proxy package. pub(crate) fn is_proxy(&self) -> bool { matches!( @@ -205,6 +235,7 @@ impl PubGrubPackage { } } + /// This isn't actually used anywhere, but can be useful for printf-debugging. #[allow(dead_code)] pub(crate) fn kind(&self) -> &'static str { match &**self { diff --git a/crates/uv-resolver/src/resolution/graph.rs b/crates/uv-resolver/src/resolution/graph.rs index 5c693c218..b19abab78 100644 --- a/crates/uv-resolver/src/resolution/graph.rs +++ b/crates/uv-resolver/src/resolution/graph.rs @@ -17,7 +17,9 @@ use uv_git::GitResolver; use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::{Version, VersionSpecifier}; use uv_pep508::{MarkerEnvironment, MarkerTree, MarkerTreeKind}; -use uv_pypi_types::{HashDigest, ParsedUrlError, Requirement, VerbatimParsedUrl, Yanked}; +use uv_pypi_types::{ + ConflictingGroupList, HashDigest, ParsedUrlError, Requirement, VerbatimParsedUrl, Yanked, +}; use crate::graph_ops::marker_reachability; use crate::pins::FilePins; @@ -101,6 +103,7 @@ impl ResolutionGraph { index: &InMemoryIndex, git: &GitResolver, python: &PythonRequirement, + conflicting_groups: &ConflictingGroupList, resolution_strategy: &ResolutionStrategy, options: Options, ) -> Result { @@ -201,8 +204,8 @@ impl ResolutionGraph { resolution .env .try_markers() - .expect("A non-forking resolution exists in forking mode") - .clone() + .cloned() + .unwrap_or(MarkerTree::TRUE) }) // Any unsatisfiable forks were skipped. .filter(|fork| !fork.is_false()) @@ -237,27 +240,40 @@ impl ResolutionGraph { fork_markers, }; - #[allow(unused_mut, reason = "Used in debug_assertions below")] - let mut conflicting = graph.find_conflicting_distributions(); - if !conflicting.is_empty() { - tracing::warn!( - "found {} conflicting distributions in resolution, \ + // We only do conflicting distribution detection when no + // conflicting groups have been specified. The reason here + // is that when there are conflicting groups, then from the + // perspective of marker expressions only, it may look like + // one can install different versions of the same package for + // the same marker environment. However, the thing preventing + // this is that the only way this should be possible is if + // one tries to install two or more conflicting extras at + // the same time. 
At which point, uv will report an error, + // thereby sidestepping the possibility of installing different + // versions of the same package into the same virtualenv. ---AG + if conflicting_groups.is_empty() { + #[allow(unused_mut, reason = "Used in debug_assertions below")] + let mut conflicting = graph.find_conflicting_distributions(); + if !conflicting.is_empty() { + tracing::warn!( + "found {} conflicting distributions in resolution, \ please report this as a bug at \ https://github.com/astral-sh/uv/issues/new", - conflicting.len() - ); - } - // When testing, we materialize any conflicting distributions as an - // error to ensure any relevant tests fail. Otherwise, we just leave - // it at the warning message above. The reason for not returning an - // error "in production" is that an incorrect resolution may only be - // incorrect in certain marker environments, but fine in most others. - // Returning an error in that case would make `uv` unusable whenever - // the bug occurs, but letting it through means `uv` *could* still be - // usable. - #[cfg(debug_assertions)] - if let Some(err) = conflicting.pop() { - return Err(ResolveError::ConflictingDistribution(err)); + conflicting.len() + ); + } + // When testing, we materialize any conflicting distributions as an + // error to ensure any relevant tests fail. Otherwise, we just leave + // it at the warning message above. The reason for not returning an + // error "in production" is that an incorrect resolution may only be + // incorrect in certain marker environments, but fine in most others. + // Returning an error in that case would make `uv` unusable whenever + // the bug occurs, but letting it through means `uv` *could* still be + // usable. + #[cfg(debug_assertions)] + if let Some(err) = conflicting.pop() { + return Err(ResolveError::ConflictingDistribution(err)); + } } Ok(graph) } diff --git a/crates/uv-resolver/src/resolver/environment.rs b/crates/uv-resolver/src/resolver/environment.rs index 01f21552c..debe163af 100644 --- a/crates/uv-resolver/src/resolver/environment.rs +++ b/crates/uv-resolver/src/resolver/environment.rs @@ -1,12 +1,16 @@ use std::sync::Arc; +use rustc_hash::{FxHashMap, FxHashSet}; +use uv_normalize::{ExtraName, PackageName}; use uv_pep508::{MarkerEnvironment, MarkerTree}; -use uv_pypi_types::ResolverMarkerEnvironment; +use uv_pypi_types::{ + ConflictingGroup, ConflictingGroupList, ConflictingGroupRef, ResolverMarkerEnvironment, +}; +use crate::pubgrub::{PubGrubDependency, PubGrubPackage}; use crate::requires_python::RequiresPythonRange; use crate::resolver::ForkState; use crate::PythonRequirement; -use crate::ResolveError; /// Represents one or more marker environments for a resolution. /// @@ -57,7 +61,7 @@ use crate::ResolveError; /// explicitly by the resolver. (Perhaps a future refactor can incorporate /// `requires-python` into this type as well, but it's not totally clear at /// time of writing if that's a good idea or not.) -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ResolverEnvironment { kind: Kind, } @@ -69,7 +73,7 @@ pub struct ResolverEnvironment { /// analysis on this type, and instead try to encapsulate the case analysis via /// higher level routines on `ResolverEnvironment` itself. (This goal may prove /// intractable, so don't treat it like gospel.) -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] enum Kind { /// We're solving for one specific marker environment only. 
/// @@ -94,6 +98,8 @@ enum Kind { initial_forks: Arc<[MarkerTree]>, /// The markers associated with this resolver fork. markers: MarkerTree, + /// Conflicting group exclusions. + exclude: Arc>>, }, } @@ -131,6 +137,7 @@ impl ResolverEnvironment { let kind = Kind::Universal { initial_forks: initial_forks.into(), markers: MarkerTree::TRUE, + exclude: Arc::new(FxHashMap::default()), }; ResolverEnvironment { kind } } @@ -149,32 +156,44 @@ impl ResolverEnvironment { /// Returns `false` only when this environment is a fork and it is disjoint /// with the given marker. - pub(crate) fn included(&self, marker: &MarkerTree) -> bool { + pub(crate) fn included_by_marker(&self, marker: &MarkerTree) -> bool { match self.kind { Kind::Specific { .. } => true, Kind::Universal { ref markers, .. } => !markers.is_disjoint(marker), } } + /// Returns true if the dependency represented by this forker may be + /// included in the given resolver environment. + pub(crate) fn included_by_group(&self, group: ConflictingGroupRef<'_>) -> bool { + match self.kind { + Kind::Specific { .. } => true, + Kind::Universal { ref exclude, .. } => !exclude + .get(group.package()) + .map(|set| set.contains(group.extra())) + .unwrap_or(false), + } + } + + /// Returns the bounding Python versions that can satisfy this + /// resolver environment's marker, if it's constrained. + pub(crate) fn requires_python(&self) -> Option { + let marker = self.try_markers().unwrap_or(&MarkerTree::TRUE); + crate::marker::requires_python(marker) + } + /// Narrow this environment given the forking markers. /// - /// This should be used when generating forking states in the resolver. In - /// effect, this "forks" this environment (which itself may be a fork) by - /// intersecting it with the markers given. + /// This effectively intersects any markers in this environment with the + /// markers given, and returns the new resulting environment. /// - /// This may return `None` when the marker intersection results in a marker - /// that can never be true for the given Python requirement. In this case, - /// the corresponding fork should be dropped. + /// This is also useful in tests to generate a "forked" marker environment. /// /// # Panics /// /// This panics if the resolver environment corresponds to one and only one /// specific marker environment. i.e., "pip"-style resolution. - pub(crate) fn narrow_environment( - &self, - python_requirement: &PythonRequirement, - rhs: &MarkerTree, - ) -> Option { + fn narrow_environment(&self, rhs: MarkerTree) -> ResolverEnvironment { match self.kind { Kind::Specific { .. } => { unreachable!("environment narrowing only happens in universal resolution") @@ -182,25 +201,58 @@ impl ResolverEnvironment { Kind::Universal { ref initial_forks, markers: ref lhs, + ref exclude, } => { - let mut lhs = lhs.clone(); - lhs.and(rhs.clone()); - let python_marker = python_requirement.to_marker_tree(); - // If the new combined marker is disjoint with the given - // Python requirement, then this fork shouldn't exist. - if lhs.is_disjoint(&python_marker) { - tracing::debug!( - "Skipping split {lhs:?} \ - because of Python requirement {python_marker:?}", - ); - return None; + let mut markers = lhs.clone(); + markers.and(rhs); + let kind = Kind::Universal { + initial_forks: Arc::clone(initial_forks), + markers, + exclude: Arc::clone(exclude), + }; + ResolverEnvironment { kind } + } + } + } + + /// Returns a new resolver environment with the given groups excluded from + /// it. 
+ /// + /// When a group is excluded from a resolver environment, + /// `ResolverEnvironment::included_by_group` will return false. The idea + /// is that a dependency with a corresponding group should be excluded by + /// forks in the resolver with this environment. + /// + /// # Panics + /// + /// This panics if the resolver environment corresponds to one and only one + /// specific marker environment. i.e., "pip"-style resolution. + pub(crate) fn exclude_by_group( + &self, + groups: impl IntoIterator, + ) -> ResolverEnvironment { + match self.kind { + Kind::Specific { .. } => { + unreachable!("environment narrowing only happens in universal resolution") + } + Kind::Universal { + ref initial_forks, + ref markers, + ref exclude, + } => { + let mut exclude: FxHashMap<_, _> = (**exclude).clone(); + for group in groups { + exclude + .entry(group.package().clone()) + .or_default() + .insert(group.extra().clone()); } let kind = Kind::Universal { - initial_forks: initial_forks.clone(), - markers: lhs, + initial_forks: Arc::clone(initial_forks), + markers: markers.clone(), + exclude: Arc::new(exclude), }; - - Some(ResolverEnvironment { kind }) + ResolverEnvironment { kind } } } } @@ -215,7 +267,9 @@ impl ResolverEnvironment { /// configuration. pub(crate) fn initial_forked_states(&self, init: ForkState) -> Vec { let Kind::Universal { - ref initial_forks, .. + ref initial_forks, + markers: ref _markers, + exclude: ref _exclude, } = self.kind else { return vec![init]; @@ -226,7 +280,10 @@ impl ResolverEnvironment { initial_forks .iter() .rev() - .filter_map(|initial_fork| init.clone().with_env(&initial_fork)) + .map(|initial_fork| { + init.clone() + .with_env(self.narrow_environment(initial_fork.clone())) + }) .collect() } @@ -244,7 +301,7 @@ impl ResolverEnvironment { &self, python_requirement: &PythonRequirement, ) -> Option { - Some(python_requirement.narrow(&self.requires_python_range()?)?) + python_requirement.narrow(&self.requires_python_range()?) } /// Returns a message formatted for end users representing a fork in the @@ -314,6 +371,106 @@ impl std::fmt::Display for ResolverEnvironment { } } +/// The different forking possibilities. +/// +/// Upon seeing a dependency, when determining whether to fork, three +/// different cases are possible: +/// +/// 1. Forking cannot be ruled out. +/// 2. The dependency is excluded by the "parent" fork. +/// 3. The dependency is unconditional and thus cannot provoke new forks. +/// +/// This enum encapsulates those possibilities. In the first case, a helper is +/// returned to help management the nuts and bolts of forking. +#[derive(Debug)] +pub(crate) enum ForkingPossibility<'d> { + Possible(Forker<'d>), + DependencyAlwaysExcluded, + NoForkingPossible, +} + +impl<'d> ForkingPossibility<'d> { + pub(crate) fn new( + env: &ResolverEnvironment, + dep: &'d PubGrubDependency, + ) -> ForkingPossibility<'d> { + let marker = dep.package.marker().unwrap_or(&MarkerTree::TRUE); + if !env.included_by_marker(marker) { + ForkingPossibility::DependencyAlwaysExcluded + } else if marker.is_true() { + ForkingPossibility::NoForkingPossible + } else { + let forker = Forker { + package: &dep.package, + marker: marker.clone(), + }; + ForkingPossibility::Possible(forker) + } + } +} + +/// An encapsulation of forking based on a single dependency. +#[derive(Debug)] +pub(crate) struct Forker<'d> { + package: &'d PubGrubPackage, + marker: MarkerTree, +} + +impl<'d> Forker<'d> { + /// Attempt a fork based on the given resolver environment. 
+ /// + /// If a fork is possible, then a new forker and at least one new + /// resolver environment is returned. In some cases, it is possible for + /// more resolver environments to be returned. (For example, when the + /// negation of this forker's markers has overlap with the given resolver + /// environment.) + pub(crate) fn fork( + &self, + env: &ResolverEnvironment, + _conflicting_groups: &ConflictingGroupList, + ) -> Option<(Forker<'d>, Vec)> { + if !env.included_by_marker(&self.marker) { + return None; + } + + let Kind::Universal { + markers: ref env_marker, + .. + } = env.kind + else { + panic!("resolver must be in universal mode for forking") + }; + + let mut envs = vec![]; + { + let not_marker = self.marker.negate(); + if !env_marker.is_disjoint(¬_marker) { + envs.push(env.narrow_environment(not_marker)); + } + } + // Note also that we push this one last for historical reasons. + // Changing the order of forks can change the output in some + // ways. While it's probably fine, we try to avoid changing the + // output. + envs.push(env.narrow_environment(self.marker.clone())); + + let mut remaining_marker = self.marker.clone(); + remaining_marker.and(env_marker.negate()); + let remaining_forker = Forker { + package: self.package, + marker: remaining_marker, + }; + Some((remaining_forker, envs)) + } + + /// Returns true if the dependency represented by this forker may be + /// included in the given resolver environment. + pub(crate) fn included(&self, env: &ResolverEnvironment) -> bool { + let marker = self.package.marker().unwrap_or(&MarkerTree::TRUE); + env.included_by_marker(marker) + } +} + #[cfg(test)] mod tests { use std::ops::Bound; @@ -352,8 +509,7 @@ mod tests { fn requires_python_range_lower(lower_version_bound: &str) -> RequiresPythonRange { let lower = LowerBound::new(Bound::Included(version(lower_version_bound))); - let range = RequiresPythonRange::new(lower, UpperBound::default()); - range + RequiresPythonRange::new(lower, UpperBound::default()) } fn marker(marker: &str) -> MarkerTree { @@ -413,8 +569,7 @@ mod tests { fn narrow_python_requirement_forking_no_op() { let pyreq = python_requirement("3.10"); let resolver_env = ResolverEnvironment::universal(vec![]) - .narrow_environment(&pyreq, &marker("python_version >= '3.10'")) - .unwrap(); + .narrow_environment(marker("python_version >= '3.10'")); assert_eq!( resolver_env.narrow_python_requirement(&pyreq), Some(python_requirement("3.10")), @@ -428,8 +583,7 @@ mod tests { fn narrow_python_requirement_forking_stricter() { let pyreq = python_requirement("3.10"); let resolver_env = ResolverEnvironment::universal(vec![]) - .narrow_environment(&pyreq, &marker("python_version >= '3.11'")) - .unwrap(); + .narrow_environment(marker("python_version >= '3.11'")); let expected = { let range = requires_python_range_lower("3.11"); let requires_python = requires_python_lower("3.10").narrow(&range).unwrap(); @@ -448,8 +602,7 @@ mod tests { fn narrow_python_requirement_forking_relaxed() { let pyreq = python_requirement("3.11"); let resolver_env = ResolverEnvironment::universal(vec![]) - .narrow_environment(&pyreq, &marker("python_version >= '3.10'")) - .unwrap(); + .narrow_environment(marker("python_version >= '3.10'")); assert_eq!( resolver_env.narrow_python_requirement(&pyreq), Some(python_requirement("3.11")), diff --git a/crates/uv-resolver/src/resolver/fork_map.rs b/crates/uv-resolver/src/resolver/fork_map.rs index 607d86e15..adacc401c 100644 --- a/crates/uv-resolver/src/resolver/fork_map.rs +++ 
b/crates/uv-resolver/src/resolver/fork_map.rs @@ -60,7 +60,7 @@ impl ForkMap { }; values .iter() - .filter(|entry| env.included(&entry.marker)) + .filter(|entry| env.included_by_marker(&entry.marker)) .map(|entry| &entry.value) .collect() } diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index 7f72e6fd8..5b2d09989 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -1,7 +1,5 @@ //! Given a set of requirements, find a set of compatible packages. -#![allow(warnings)] - use std::borrow::Cow; use std::cmp::Ordering; use std::collections::{BTreeMap, BTreeSet, VecDeque}; @@ -15,13 +13,14 @@ use dashmap::DashMap; use either::Either; use futures::{FutureExt, StreamExt}; use itertools::Itertools; -use pubgrub::{Incompatibility, Range, Ranges, State}; +use pubgrub::{Incompatibility, Range, State}; use rustc_hash::{FxHashMap, FxHashSet}; use tokio::sync::mpsc::{self, Receiver, Sender}; use tokio::sync::oneshot; use tokio_stream::wrappers::ReceiverStream; use tracing::{debug, info, instrument, trace, warn, Level}; +use environment::ForkingPossibility; pub use environment::ResolverEnvironment; pub(crate) use fork_map::{ForkMap, ForkSet}; pub(crate) use urls::Urls; @@ -38,7 +37,10 @@ use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::{release_specifiers_to_ranges, Version, MIN_VERSION}; use uv_pep508::MarkerTree; use uv_platform_tags::Tags; -use uv_pypi_types::{Requirement, ResolutionMetadata, VerbatimParsedUrl}; +use uv_pypi_types::{ + ConflictingGroup, ConflictingGroupList, ConflictingGroupRef, Requirement, ResolutionMetadata, + VerbatimParsedUrl, +}; use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider}; use uv_warnings::warn_user_once; @@ -108,6 +110,7 @@ struct ResolverState { hasher: HashStrategy, env: ResolverEnvironment, python_requirement: PythonRequirement, + conflicting_groups: ConflictingGroupList, workspace_members: BTreeSet, selector: CandidateSelector, index: InMemoryIndex, @@ -148,6 +151,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider> options: Options, python_requirement: &'a PythonRequirement, env: ResolverEnvironment, + conflicting_groups: ConflictingGroupList, tags: Option<&'a Tags>, flat_index: &'a FlatIndex, index: &'a InMemoryIndex, @@ -174,6 +178,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider> hasher, env, python_requirement, + conflicting_groups, index, build_context.git(), build_context.capabilities(), @@ -194,6 +199,7 @@ impl hasher: &HashStrategy, env: ResolverEnvironment, python_requirement: &PythonRequirement, + conflicting_groups: ConflictingGroupList, index: &InMemoryIndex, git: &GitResolver, capabilities: &IndexCapabilities, @@ -221,6 +227,7 @@ impl locations: locations.clone(), env, python_requirement: python_requirement.clone(), + conflicting_groups, installed_packages, unavailable_packages: DashMap::default(), incomplete_packages: DashMap::default(), @@ -601,6 +608,7 @@ impl ResolverState ResolverState ResolverState ResolverState ForkedDependencies::Unavailable(err), }) } else { - Ok(result?.fork(python_requirement)) + Ok(result?.fork(env, python_requirement, &self.conflicting_groups)) } } @@ -1336,6 +1333,18 @@ impl ResolverState ResolverState ResolverState { if !requirement.evaluate_markers(env.marker_environment(), &[]) { @@ -1640,7 +1654,7 @@ impl ResolverState ResolverState { if !constraint.evaluate_markers(env.marker_environment(), &[]) { @@ -2071,7 +2090,7 @@ impl 
ResolverState Option { - self.env = self - .env - .narrow_environment(&self.python_requirement, markers)?; + fn with_env(mut self, env: ResolverEnvironment) -> Self { + self.env = env; // If the fork contains a narrowed Python requirement, apply it. if let Some(req) = self.env.narrow_python_requirement(&self.python_requirement) { debug!("Narrowed `requires-python` bound to: {}", req.target()); self.python_requirement = req; } - Some(self) + self } fn into_resolution(self) -> Resolution { @@ -2664,7 +2681,12 @@ impl Dependencies { /// A fork *only* occurs when there are multiple dependencies with the same /// name *and* those dependency specifications have corresponding marker /// expressions that are completely disjoint with one another. - fn fork(self, python_requirement: &PythonRequirement) -> ForkedDependencies { + fn fork( + self, + env: &ResolverEnvironment, + python_requirement: &PythonRequirement, + conflicting_groups: &ConflictingGroupList, + ) -> ForkedDependencies { let deps = match self { Dependencies::Available(deps) => deps, Dependencies::Unforkable(deps) => return ForkedDependencies::Unforked(deps), @@ -2682,7 +2704,7 @@ impl Dependencies { let Forks { mut forks, diverging_packages, - } = Forks::new(name_to_deps, python_requirement); + } = Forks::new(name_to_deps, env, python_requirement, conflicting_groups); if forks.is_empty() { ForkedDependencies::Unforked(vec![]) } else if forks.len() == 1 { @@ -2742,14 +2764,13 @@ struct Forks { impl Forks { fn new( name_to_deps: BTreeMap>, + env: &ResolverEnvironment, python_requirement: &PythonRequirement, + conflicting_groups: &ConflictingGroupList, ) -> Forks { let python_marker = python_requirement.to_marker_tree(); - let mut forks = vec![Fork { - dependencies: vec![], - markers: MarkerTree::TRUE, - }]; + let mut forks = vec![Fork::new(env.clone())]; let mut diverging_packages = BTreeSet::new(); for (name, mut deps) in name_to_deps { assert!(!deps.is_empty(), "every name has at least one dependency"); @@ -2780,64 +2801,118 @@ impl Forks { let dep = deps.pop().unwrap(); let markers = dep.package.marker().cloned().unwrap_or(MarkerTree::TRUE); for fork in &mut forks { - if !fork.markers.is_disjoint(&markers) { - fork.dependencies.push(dep.clone()); + if fork.env.included_by_marker(&markers) { + fork.add_dependency(dep.clone()); } } continue; } } for dep in deps { - let mut markers = dep.package.marker().cloned().unwrap_or(MarkerTree::TRUE); - if markers.is_false() { - // If the markers can never be satisfied, then we - // can drop this dependency unceremoniously. - continue; - } - if markers.is_true() { - // Or, if the markers are always true, then we just - // add the dependency to every fork unconditionally. - for fork in &mut forks { - if !fork.markers.is_disjoint(&markers) { - fork.dependencies.push(dep.clone()); - } + let mut forker = match ForkingPossibility::new(env, &dep) { + ForkingPossibility::Possible(forker) => forker, + ForkingPossibility::DependencyAlwaysExcluded => { + // If the markers can never be satisfied by the parent + // fork, then we can drop this dependency unceremoniously. + continue; } - continue; - } + ForkingPossibility::NoForkingPossible => { + // Or, if the markers are always true, then we just + // add the dependency to every fork unconditionally. + for fork in &mut forks { + fork.add_dependency(dep.clone()); + } + continue; + } + }; // Otherwise, we *should* need to add a new fork... 
diverging_packages.insert(name.clone()); let mut new = vec![]; - for mut fork in std::mem::take(&mut forks) { - if fork.markers.is_disjoint(&markers) { + for fork in std::mem::take(&mut forks) { + let Some((remaining_forker, envs)) = forker.fork(&fork.env, conflicting_groups) + else { new.push(fork); continue; - } + }; + forker = remaining_forker; - let not_markers = markers.negate(); - let mut new_markers = markers.clone(); - new_markers.and(fork.markers.negate()); - if !fork.markers.is_disjoint(¬_markers) { + for fork_env in envs { let mut new_fork = fork.clone(); - new_fork.intersect(not_markers); + new_fork.set_env(fork_env); + // We only add the dependency to this fork if it + // satisfies the fork's markers. Some forks are + // specifically created to exclude this dependency, + // so this isn't always true! + if forker.included(&new_fork.env) { + new_fork.add_dependency(dep.clone()); + } // Filter out any forks we created that are disjoint with our // Python requirement. - if !new_fork.markers.is_disjoint(&python_marker) { + if new_fork.env.included_by_marker(&python_marker) { new.push(new_fork); } } - fork.dependencies.push(dep.clone()); - fork.intersect(markers); - // Filter out any forks we created that are disjoint with our - // Python requirement. - if !fork.markers.is_disjoint(&python_marker) { - new.push(fork); - } - markers = new_markers; } forks = new; } } + // When there is a conflicting group configuration, we need + // to potentially add more forks. Each fork added contains an + // exclusion list of conflicting groups where dependencies with + // the corresponding package and extra name are forcefully + // excluded from that group. + // + // We specifically iterate on conflicting groups and + // potentially re-generate all forks for each one. We do it + // this way in case there are multiple sets of conflicting + // groups that impact the forks here. + // + // For example, if we have conflicting groups {x1, x2} and {x3, + // x4}, we need to make sure the forks generated from one set + // also account for the other set. + for groups in conflicting_groups.iter() { + let mut new = vec![]; + for fork in std::mem::take(&mut forks) { + let mut has_conflicting_dependency = false; + for group in groups.iter() { + if fork.contains_conflicting_group(group.as_ref()) { + has_conflicting_dependency = true; + break; + } + } + if !has_conflicting_dependency { + new.push(fork); + continue; + } + + // Create a fork that excludes ALL extras. + let mut fork_none = fork.clone(); + for group in groups.iter() { + fork_none = fork_none.exclude([group.clone()]); + } + new.push(fork_none); + + // Now create a fork for each conflicting group, where + // that fork excludes every *other* conflicting group. + // + // So if we have conflicting extras foo, bar and baz, + // then this creates three forks: one that excludes + // {foo, bar}, one that excludes {foo, baz} and one + // that excludes {bar, baz}. + for (i, _) in groups.iter().enumerate() { + let fork_allows_group = fork.clone().exclude( + groups + .iter() + .enumerate() + .filter(|&(j, _)| i != j) + .map(|(_, group)| group.clone()), + ); + new.push(fork_allows_group); + } + } + forks = new; + } Forks { forks, diverging_packages, @@ -2854,7 +2929,7 @@ impl Forks { /// have the same name and because the marker expressions are disjoint, /// a fork occurs. One fork will contain `a<2` but not `a>=2`, while /// the other fork will contain `a>=2` but not `a<2`. 
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug)]
 struct Fork {
     /// The list of dependencies for this fork, guaranteed to be conflict
     /// free. (i.e., There are no two packages with the same name with
@@ -2865,26 +2940,108 @@ struct Fork {
     /// it should be impossible for a package with a marker expression that is
     /// disjoint from the marker expression on this fork to be added.
     dependencies: Vec<PubGrubDependency>,
-    /// The markers that provoked this fork.
+    /// The conflicting groups in this fork.
     ///
-    /// So in the example above, the `a<2` fork would have
-    /// `sys_platform == 'foo'`, while the `a>=2` fork would have
-    /// `sys_platform == 'bar'`.
+    /// This exists to make some access patterns more efficient. Namely,
+    /// it makes it easy to check whether there's a dependency with a
+    /// particular conflicting group in this fork.
+    conflicting_groups: FxHashMap<PackageName, FxHashSet<ExtraName>>,
+    /// The resolver environment for this fork.
     ///
-    /// (This doesn't include any marker expressions from a parent fork.)
-    markers: MarkerTree,
+    /// Principally, this corresponds to the markers in this fork. So in the
+    /// example above, the `a<2` fork would have `sys_platform == 'foo'`, while
+    /// the `a>=2` fork would have `sys_platform == 'bar'`.
+    ///
+    /// If this fork was generated from another fork, then this *includes*
+    /// the criteria from its parent. i.e., Its marker expression represents
+    /// the intersection of the marker expression from its parent and any
+    /// additional marker expression generated by additional forking based on
+    /// conflicting dependency specifications.
+    env: ResolverEnvironment,
 }

 impl Fork {
-    fn intersect(&mut self, markers: MarkerTree) {
-        self.markers.and(markers);
+    /// Create a new fork with no dependencies with the given resolver
+    /// environment.
+    fn new(env: ResolverEnvironment) -> Fork {
+        Fork {
+            dependencies: vec![],
+            conflicting_groups: FxHashMap::default(),
+            env,
+        }
+    }
+
+    /// Add a dependency to this fork.
+    fn add_dependency(&mut self, dep: PubGrubDependency) {
+        if let Some(conflicting_group) = dep.package.conflicting_group() {
+            self.conflicting_groups
+                .entry(conflicting_group.package().clone())
+                .or_default()
+                .insert(conflicting_group.extra().clone());
+        }
+        self.dependencies.push(dep);
+    }
+
+    /// Sets the resolver environment to the one given.
+    ///
+    /// Any dependency in this fork that does not satisfy the given environment
+    /// is removed.
+    fn set_env(&mut self, env: ResolverEnvironment) {
+        self.env = env;
         self.dependencies.retain(|dep| {
             let Some(markers) = dep.package.marker() else {
                 return true;
             };
-            !self.markers.is_disjoint(markers)
+            if self.env.included_by_marker(markers) {
+                return true;
+            }
+            if let Some(conflicting_group) = dep.package.conflicting_group() {
+                if let Some(set) = self.conflicting_groups.get_mut(conflicting_group.package()) {
+                    set.remove(conflicting_group.extra());
+                }
+            }
+            false
         });
     }
+
+    /// Returns true if any of the dependencies in this fork contain a
+    /// dependency with the given package and extra values.
+    fn contains_conflicting_group(&self, group: ConflictingGroupRef<'_>) -> bool {
+        self.conflicting_groups
+            .get(group.package())
+            .map(|set| set.contains(group.extra()))
+            .unwrap_or(false)
+    }
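    // Editorial illustration (not part of this diff): suppose `pkg` declares
    // the conflicting extras `x1` and `x2`, and a fork has already picked up
    // dependencies on both `pkg[x1]` and `pkg[x2]`. The conflicting-group loop
    // in `Forks::new` above then replaces that fork with three forks built via
    // `exclude` below: one excluding both `(pkg, x1)` and `(pkg, x2)`, one
    // excluding only `(pkg, x2)` (the fork that still allows `x1`), and one
    // excluding only `(pkg, x1)` (the fork that still allows `x2`). In each
    // case, `exclude` both marks the group as excluded in the fork's resolver
    // environment and drops any already-added dependencies for that group, so
    // no single fork can end up enabling both conflicting extras.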
+ fn exclude(mut self, groups: impl IntoIterator) -> Fork { + self.env = self.env.exclude_by_group(groups); + self.dependencies.retain(|dep| { + let Some(conflicting_group) = dep.package.conflicting_group() else { + return true; + }; + if self.env.included_by_group(conflicting_group) { + return true; + } + if let Some(conflicting_group) = dep.package.conflicting_group() { + if let Some(set) = self.conflicting_groups.get_mut(conflicting_group.package()) { + set.remove(conflicting_group.extra()); + } + } + false + }); + self + } +} + +impl Eq for Fork {} + +impl PartialEq for Fork { + fn eq(&self, other: &Fork) -> bool { + self.dependencies == other.dependencies && self.env == other.env + } } impl Ord for Fork { @@ -2892,8 +3049,8 @@ impl Ord for Fork { // A higher `requires-python` requirement indicates a _lower-priority_ fork. We'd prefer // to solve `<3.7` before solving `>=3.7`, since the resolution produced by the former might // work for the latter, but the inverse is unlikely to be true. - let self_bound = marker::requires_python(&self.markers).unwrap_or_default(); - let other_bound = marker::requires_python(&other.markers).unwrap_or_default(); + let self_bound = self.env.requires_python().unwrap_or_default(); + let other_bound = other.env.requires_python().unwrap_or_default(); other_bound.lower().cmp(self_bound.lower()).then_with(|| { // If there's no difference, prioritize forks with upper bounds. We'd prefer to solve @@ -2930,3 +3087,36 @@ impl PartialOrd for Fork { Some(self.cmp(other)) } } + +/// Returns an error if a conflicting extra is found in the given requirements. +/// +/// Specifically, if there is any conflicting extra (just one is enough) that +/// is unconditionally enabled as part of a dependency specification, then this +/// returns an error. +/// +/// The reason why we're so conservative here is because it avoids us needing +/// the look at the entire dependency tree at once. +/// +/// For example, consider packages `root`, `a`, `b` and `c`, where `c` has +/// declared conflicting extras of `x1` and `x2`. +/// +/// Now imagine `root` depends on `a` and `b`, `a` depends on `c[x1]` and `b` +/// depends on `c[x2]`. That's a conflict, but not easily detectable unless +/// you reject either `c[x1]` or `c[x2]` on the grounds that `x1` and `x2` are +/// conflicting and thus cannot be enabled unconditionally. 
+fn find_conflicting_extra( + conflicting: &ConflictingGroupList, + reqs: &[Requirement], +) -> Option { + for req in reqs { + for extra in &req.extras { + if conflicting.contains(&req.name, extra) { + return Some(ResolveError::ConflictingExtra { + requirement: Box::new(req.clone()), + extra: extra.clone(), + }); + } + } + } + None +} diff --git a/crates/uv-settings/src/combine.rs b/crates/uv-settings/src/combine.rs index 61cc7368f..f3cda5b9e 100644 --- a/crates/uv-settings/src/combine.rs +++ b/crates/uv-settings/src/combine.rs @@ -8,7 +8,7 @@ use uv_configuration::{ }; use uv_distribution_types::{Index, IndexUrl, PipExtraIndex, PipFindLinks, PipIndex}; use uv_install_wheel::linker::LinkMode; -use uv_pypi_types::SupportedEnvironments; +use uv_pypi_types::{SchemaConflictingGroupList, SupportedEnvironments}; use uv_python::{PythonDownloads, PythonPreference, PythonVersion}; use uv_resolver::{AnnotationStyle, ExcludeNewer, PrereleaseMode, ResolutionMode}; @@ -90,6 +90,7 @@ impl_combine_or!(PythonVersion); impl_combine_or!(ResolutionMode); impl_combine_or!(String); impl_combine_or!(SupportedEnvironments); +impl_combine_or!(SchemaConflictingGroupList); impl_combine_or!(TargetTriple); impl_combine_or!(TrustedPublishing); impl_combine_or!(Url); diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index e7e1e2090..470cce6bc 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -100,6 +100,9 @@ pub struct Options { // NOTE(charlie): These fields should be kept in-sync with `ToolUv` in // `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct. // They're only respected in `pyproject.toml` files, and should be rejected in `uv.toml` files. + #[cfg_attr(feature = "schemars", schemars(skip))] + pub conflicting_groups: Option, + #[cfg_attr(feature = "schemars", schemars(skip))] pub workspace: Option, @@ -1559,6 +1562,7 @@ pub struct OptionsWire { // NOTE(charlie): These fields should be kept in-sync with `ToolUv` in // `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct. // They're only respected in `pyproject.toml` files, and should be rejected in `uv.toml` files. + conflicting_groups: Option, workspace: Option, sources: Option, managed: Option, @@ -1611,6 +1615,7 @@ impl From for Options { override_dependencies, constraint_dependencies, environments, + conflicting_groups, publish_url, trusted_publishing, workspace, @@ -1668,6 +1673,7 @@ impl From for Options { override_dependencies, constraint_dependencies, environments, + conflicting_groups, publish: PublishOptions { publish_url, trusted_publishing, diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index b7dfe409c..33847edf4 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -24,7 +24,10 @@ use uv_macros::OptionsMetadata; use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::MarkerTree; -use uv_pypi_types::{RequirementSource, SupportedEnvironments, VerbatimParsedUrl}; +use uv_pypi_types::{ + ConflictingGroupList, RequirementSource, SchemaConflictingGroupList, SupportedEnvironments, + VerbatimParsedUrl, +}; #[derive(Error, Debug)] pub enum PyprojectTomlError { @@ -98,6 +101,24 @@ impl PyProjectToml { false } } + + /// Returns the set of conflicts for the project. 
+ pub fn conflicting_groups(&self) -> ConflictingGroupList { + let empty = ConflictingGroupList::empty(); + let Some(project) = self.project.as_ref() else { + return empty; + }; + let Some(tool) = self.tool.as_ref() else { + return empty; + }; + let Some(tooluv) = tool.uv.as_ref() else { + return empty; + }; + let Some(conflicting) = tooluv.conflicting_groups.as_ref() else { + return empty; + }; + conflicting.to_conflicting_with_package_name(&project.name) + } } // Ignore raw document in comparison. @@ -439,6 +460,51 @@ pub struct ToolUv { "# )] pub environments: Option, + + /// Conflicting extras may be declared here. + /// + /// It's useful to declare conflicting extras when the extras have mutually + /// incompatible dependencies. For example, extra `foo` might depend on + /// `numpy==2.0.0` while extra `bar` might depend on `numpy==2.1.0`. These + /// extras cannot be activated at the same time. This usually isn't a + /// problem for pip-style workflows, but when using uv project support + /// with universal resolution, it will try to produce a resolution that + /// satisfies both extras simultaneously. + /// + /// When this happens, resolution will fail, because one cannot install + /// both `numpy 2.0.0` and `numpy 2.1.0` into the same environment. + /// + /// To work around this, you may specify `foo` and `bar` as conflicting + /// extras. When doing universal resolution in project mode, these extras + /// will get their own "forks" distinct from one another in order to permit + /// conflicting dependencies. In exchange, if one tries to install from the + /// lock file with both conflicting extras activated, installation will + /// fail. + #[cfg_attr( + feature = "schemars", + // Skipped for now while we iterate on this feature. + schemars(skip, description = "A list sets of conflicting groups or extras.") + )] + /* + This is commented out temporarily while we finalize its + functionality and naming. This avoids it showing up in docs. + #[option( + default = r#"[]"#, + value_type = "list[list[dict]]", + example = r#" + # Require that `package[test1]` and `package[test2]` + # requirements are resolved in different forks so that they + # cannot conflict with one another. + conflicting-groups = [ + [ + { extra = "test1" }, + { extra = "test2" }, + ] + ] + "# + )] + */ + pub conflicting_groups: Option, } #[derive(Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index ea07f3757..9463ef01e 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -11,7 +11,7 @@ use uv_distribution_types::Index; use uv_fs::{Simplified, CWD}; use uv_normalize::{GroupName, PackageName, DEV_DEPENDENCIES}; use uv_pep508::{MarkerTree, RequirementOrigin, VerbatimUrl}; -use uv_pypi_types::{Requirement, RequirementSource, SupportedEnvironments}; +use uv_pypi_types::{ConflictingGroupList, Requirement, RequirementSource, SupportedEnvironments}; use uv_static::EnvVars; use uv_warnings::{warn_user, warn_user_once}; @@ -392,6 +392,15 @@ impl Workspace { .and_then(|uv| uv.environments.as_ref()) } + /// Returns the set of conflicts for the workspace. + pub fn conflicting_groups(&self) -> ConflictingGroupList { + let mut conflicting = ConflictingGroupList::empty(); + for member in self.packages.values() { + conflicting.append(&mut member.pyproject_toml.conflicting_groups()); + } + conflicting + } + /// Returns the set of constraints for the workspace. 
pub fn constraints(&self) -> Vec { let Some(constraints) = self diff --git a/crates/uv-workspace/src/workspace/tests.rs b/crates/uv-workspace/src/workspace/tests.rs index 888f9de01..822b9757c 100644 --- a/crates/uv-workspace/src/workspace/tests.rs +++ b/crates/uv-workspace/src/workspace/tests.rs @@ -241,7 +241,8 @@ async fn albatross_root_workspace() { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null @@ -332,7 +333,8 @@ async fn albatross_virtual_workspace() { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null @@ -537,7 +539,8 @@ async fn exclude_package() -> Result<()> { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null @@ -640,7 +643,8 @@ async fn exclude_package() -> Result<()> { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null @@ -756,7 +760,8 @@ async fn exclude_package() -> Result<()> { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null @@ -846,7 +851,8 @@ async fn exclude_package() -> Result<()> { "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, - "environments": null + "environments": null, + "conflicting-groups": null } }, "dependency-groups": null diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 9ddbfd1cf..f9f8f4e78 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -23,7 +23,7 @@ use uv_fs::Simplified; use uv_git::GitResolver; use uv_install_wheel::linker::LinkMode; use uv_normalize::PackageName; -use uv_pypi_types::{Requirement, SupportedEnvironments}; +use uv_pypi_types::{ConflictingGroupList, Requirement, SupportedEnvironments}; use uv_python::{ EnvironmentPreference, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, PythonVersion, VersionRequest, @@ -54,6 +54,7 @@ pub(crate) async fn pip_compile( constraints_from_workspace: Vec, overrides_from_workspace: Vec, environments: SupportedEnvironments, + conflicting_groups: ConflictingGroupList, extras: ExtrasSpecification, output_file: Option<&Path>, resolution_mode: ResolutionMode, @@ -251,15 +252,20 @@ pub(crate) async fn pip_compile( }; // Determine the environment for the resolution. - let (tags, resolver_env) = if universal { + let (tags, resolver_env, conflicting_groups) = if universal { ( None, ResolverEnvironment::universal(environments.into_markers()), + conflicting_groups, ) } else { let (tags, marker_env) = resolution_environment(python_version, python_platform, &interpreter)?; - (Some(tags), ResolverEnvironment::specific(marker_env)) + ( + Some(tags), + ResolverEnvironment::specific(marker_env), + ConflictingGroupList::empty(), + ) }; // Generate, but don't enforce hashes for the requirements. 
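Before continuing with the CLI plumbing below, here is a hedged, standalone illustration of the rule enforced by `find_conflicting_extra` above: any requirement that unconditionally enables an extra declared as conflicting is rejected. This sketch uses plain types rather than uv's `Requirement` and `ConflictingGroupList`, and all names in it are invented for the example.

```rust
use std::collections::HashSet;

// A requirement reduced to the parts that matter for this check: a package
// name and the extras it unconditionally enables.
struct Req<'a> {
    name: &'a str,
    extras: Vec<&'a str>,
}

// Returns the first (package, extra) pair that is unconditionally enabled by
// some requirement while also being declared as part of a conflict set.
fn find_unconditional_conflict<'a>(
    declared: &HashSet<(&'a str, &'a str)>,
    reqs: &'a [Req<'a>],
) -> Option<(&'a str, &'a str)> {
    for req in reqs {
        for extra in &req.extras {
            if declared.contains(&(req.name, *extra)) {
                return Some((req.name, *extra));
            }
        }
    }
    None
}

fn main() {
    // `c[x1]` and `c[x2]` are declared as conflicting; depending on `c[x1]`
    // unconditionally is therefore rejected.
    let declared: HashSet<(&str, &str)> = [("c", "x1"), ("c", "x2")].into_iter().collect();
    let reqs = [Req { name: "c", extras: vec!["x1"] }];
    assert_eq!(find_unconditional_conflict(&declared, &reqs), Some(("c", "x1")));
}
```

The remaining hunks below thread the resulting `ConflictingGroupList` through the resolver entry points.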
@@ -394,6 +400,7 @@ pub(crate) async fn pip_compile( tags.as_deref(), resolver_env.clone(), python_requirement, + conflicting_groups, &client, &flat_index, &top_level_index, diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 8dc2438ec..7f92d7c63 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -20,7 +20,7 @@ use uv_fs::Simplified; use uv_install_wheel::linker::LinkMode; use uv_installer::{SatisfiesResult, SitePackages}; use uv_pep508::PackageName; -use uv_pypi_types::Requirement; +use uv_pypi_types::{ConflictingGroupList, Requirement}; use uv_python::{ EnvironmentPreference, Prefix, PythonEnvironment, PythonRequest, PythonVersion, Target, }; @@ -400,6 +400,7 @@ pub(crate) async fn pip_install( Some(&tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + ConflictingGroupList::empty(), &client, &flat_index, &state.index, diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index d954c73ee..02c112f32 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -29,7 +29,7 @@ use uv_install_wheel::linker::LinkMode; use uv_installer::{Plan, Planner, Preparer, SitePackages}; use uv_normalize::{GroupName, PackageName}; use uv_platform_tags::Tags; -use uv_pypi_types::ResolverMarkerEnvironment; +use uv_pypi_types::{ConflictingGroupList, ResolverMarkerEnvironment}; use uv_python::PythonEnvironment; use uv_requirements::{ LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification, @@ -104,6 +104,7 @@ pub(crate) async fn resolve( tags: Option<&Tags>, resolver_env: ResolverEnvironment, python_requirement: PythonRequirement, + conflicting_groups: ConflictingGroupList, client: &RegistryClient, flat_index: &FlatIndex, index: &InMemoryIndex, @@ -290,6 +291,7 @@ pub(crate) async fn resolve( options, &python_requirement, resolver_env, + conflicting_groups, tags, flat_index, index, diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index ca3de945d..b5524adb3 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -17,6 +17,7 @@ use uv_fs::Simplified; use uv_install_wheel::linker::LinkMode; use uv_installer::SitePackages; use uv_pep508::PackageName; +use uv_pypi_types::ConflictingGroupList; use uv_python::{ EnvironmentPreference, Prefix, PythonEnvironment, PythonRequest, PythonVersion, Target, }; @@ -344,6 +345,7 @@ pub(crate) async fn pip_sync( Some(&tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + ConflictingGroupList::empty(), &client, &flat_index, &state.index, diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index e09fe5cb7..c1707ed1d 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -628,6 +628,7 @@ async fn do_lock( None, resolver_env, python_requirement, + workspace.conflicting_groups(), &client, &flat_index, &state.index, @@ -657,6 +658,7 @@ async fn do_lock( let previous = existing_lock.map(ValidatedLock::into_lock); let lock = Lock::from_resolution_graph(&resolution, workspace.install_path())? 
.with_manifest(manifest) + .with_conflicting_groups(workspace.conflicting_groups()) .with_supported_environments( environments .cloned() @@ -800,6 +802,16 @@ impl ValidatedLock { return Ok(Self::Versions(lock)); } + // If the conflicting group config has changed, we have to perform a clean resolution. + if &workspace.conflicting_groups() != lock.conflicting_groups() { + debug!( + "Ignoring existing lockfile due to change in conflicting groups: `{:?}` vs. `{:?}`", + workspace.conflicting_groups(), + lock.conflicting_groups(), + ); + return Ok(Self::Versions(lock)); + } + // If the user provided at least one index URL (from the command line, or from a configuration // file), don't use the existing lockfile if it references any registries that are no longer // included in the current configuration. diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 5ee669151..da26837f8 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -19,10 +19,10 @@ use uv_distribution_types::{ use uv_fs::{Simplified, CWD}; use uv_git::ResolvedRepositoryReference; use uv_installer::{SatisfiesResult, SitePackages}; -use uv_normalize::{GroupName, PackageName, DEV_DEPENDENCIES}; +use uv_normalize::{ExtraName, GroupName, PackageName, DEV_DEPENDENCIES}; use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::MarkerTreeContents; -use uv_pypi_types::Requirement; +use uv_pypi_types::{ConflictingGroupList, ConflictingGroups, Requirement}; use uv_python::{ EnvironmentPreference, Interpreter, InvalidEnvironmentKind, PythonDownloads, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, PythonVariant, PythonVersionFile, @@ -80,6 +80,17 @@ pub(crate) enum ProjectError { #[error("The current Python platform is not compatible with the lockfile's supported environments: {0}")] LockedPlatformIncompatibility(String), + #[error( + "The requested extras ({}) are incompatible with the declared conflicting extra: {{{}}}", + _1.iter().map(|extra| format!("`{extra}`")).collect::>().join(", "), + _0 + .iter() + .map(|group| format!("`{}[{}]`", group.package(), group.extra())) + .collect::>() + .join(", "), + )] + ExtraIncompatibility(ConflictingGroups, Vec), + #[error("The requested interpreter resolved to Python {0}, which is incompatible with the project's Python requirement: `{1}`")] RequestedPythonProjectIncompatibility(Version, RequiresPython), @@ -1091,6 +1102,7 @@ pub(crate) async fn resolve_environment<'a>( Some(tags), ResolverEnvironment::specific(marker_env), python_requirement, + ConflictingGroupList::empty(), &client, &flat_index, &state.index, @@ -1433,6 +1445,7 @@ pub(crate) async fn update_environment( Some(tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + ConflictingGroupList::empty(), &client, &flat_index, &state.index, diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 3367eb908..84d5cd3e9 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -15,7 +15,7 @@ use uv_configuration::{ use uv_dispatch::BuildDispatch; use uv_distribution_types::{DirectorySourceDist, Dist, Index, ResolvedDist, SourceDist}; use uv_installer::SitePackages; -use uv_normalize::PackageName; +use uv_normalize::{ExtraName, PackageName}; use uv_pep508::{MarkerTree, Requirement, VersionOrUrl}; use uv_pypi_types::{ LenientRequirement, ParsedArchiveUrl, ParsedGitUrl, ParsedUrl, VerbatimParsedUrl, @@ -278,6 +278,23 @@ pub(super) 
async fn do_sync( )); } + // Validate that we aren't trying to install extras that are + // declared as conflicting. + let conflicting_groups = target.lock().conflicting_groups(); + for groups in conflicting_groups.iter() { + let conflicting = groups + .iter() + .filter(|group| extras.contains(group.extra())) + .map(|group| group.extra().clone()) + .collect::>(); + if conflicting.len() >= 2 { + return Err(ProjectError::ExtraIncompatibility( + groups.clone(), + conflicting, + )); + } + } + // Determine the markers to use for resolution. let marker_env = venv.interpreter().resolver_marker_environment(); diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index efc66f4fc..52607d585 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -24,6 +24,7 @@ use uv_cli::{PythonCommand, PythonNamespace, ToolCommand, ToolNamespace, TopLeve #[cfg(feature = "self-update")] use uv_cli::{SelfCommand, SelfNamespace, SelfUpdateArgs}; use uv_fs::CWD; +use uv_pypi_types::ConflictingGroupList; use uv_requirements::RequirementsSource; use uv_scripts::{Pep723Item, Pep723Metadata, Pep723Script}; use uv_settings::{Combine, FilesystemOptions, Options}; @@ -332,6 +333,7 @@ async fn run(mut cli: Cli) -> Result { args.constraints_from_workspace, args.overrides_from_workspace, args.environments, + ConflictingGroupList::empty(), args.settings.extras, args.settings.output_file.as_deref(), args.settings.resolution, diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index ce48a30fa..7dbb846ea 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -2164,6 +2164,1212 @@ fn lock_dependency_non_existent_extra() -> Result<()> { Ok(()) } +/// This tests a "basic" case for specifying conflicting extras. +/// +/// Namely, we check that 1) without declaring them conflicting, +/// resolution fails, 2) declaring them conflicting, resolution +/// succeeds, 3) install succeeds, 4) install fails when requesting two +/// or more extras that are declared to conflict with each other. +/// +/// This test was inspired by: +/// +#[test] +fn lock_conflicting_extra_basic() -> Result<()> { + let context = TestContext::new("3.12"); + + // First we test that resolving with two extras that have + // conflicting dependencies fails. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project2] depends on sortedcontainers==2.4.0 and project[project1] depends on sortedcontainers==2.3.0, we can conclude that project[project1] and project[project2] are incompatible. + And because your project requires project[project1] and project[project2], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // And now with the same extra configuration, we tell uv about + // the conflicting extras, which forces it to resolve each in + // their own fork. 
+ let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + resolution-markers = [ + ] + conflicting-groups = [[ + { package = "project", extra = "project1" }, + { package = "project", extra = "project2" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + version = "0.1.0" + source = { editable = "." } + + [package.optional-dependencies] + project1 = [ + { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } }, + ] + project2 = [ + { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } }, + ] + + [package.metadata] + requires-dist = [ + { name = "sortedcontainers", marker = "extra == 'project1'", specifier = "==2.3.0" }, + { name = "sortedcontainers", marker = "extra == 'project2'", specifier = "==2.4.0" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.3.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + ] + "### + ); + }); + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + // Install from the lockfile. 
+ uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + project==0.1.0 (from file://[TEMP_DIR]/) + "###); + // Another install, but with one of the extras enabled. + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=project1"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + sortedcontainers==2.3.0 + "###); + // Another install, but with the other extra enabled. + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=project2"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Uninstalled 1 package in [TIME] + Installed 1 package in [TIME] + - sortedcontainers==2.3.0 + + sortedcontainers==2.4.0 + "###); + // And finally, installing both extras should error. + uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--all-extras"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: The requested extras (`project1`, `project2`) are incompatible with the declared conflicting extra: {`project[project1]`, `project[project2]`} + "###); + + Ok(()) +} + +/// Like `lock_conflicting_extra_basic`, but defines three conflicting +/// extras instead of two. +#[test] +fn lock_conflicting_extra_basic_three_extras() -> Result<()> { + let context = TestContext::new("3.12"); + + // First we test that resolving with two extras that have + // conflicting dependencies fails. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.2.0"] + project2 = ["sortedcontainers==2.3.0"] + project3 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project3] depends on sortedcontainers==2.4.0 and project[project1] depends on sortedcontainers==2.2.0, we can conclude that project[project1] and project[project3] are incompatible. + And because your project requires project[project1] and project[project3], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // And now with the same extra configuration, we tell uv about + // the conflicting extras, which forces it to resolve each in + // their own fork. 
+ let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + { extra = "project3" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.2.0"] + project2 = ["sortedcontainers==2.3.0"] + project3 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + "###); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + resolution-markers = [ + ] + conflicting-groups = [[ + { package = "project", extra = "project1" }, + { package = "project", extra = "project2" }, + { package = "project", extra = "project3" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + version = "0.1.0" + source = { editable = "." } + + [package.optional-dependencies] + project1 = [ + { name = "sortedcontainers", version = "2.2.0", source = { registry = "https://pypi.org/simple" } }, + ] + project2 = [ + { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } }, + ] + project3 = [ + { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } }, + ] + + [package.metadata] + requires-dist = [ + { name = "sortedcontainers", marker = "extra == 'project1'", specifier = "==2.2.0" }, + { name = "sortedcontainers", marker = "extra == 'project2'", specifier = "==2.3.0" }, + { name = "sortedcontainers", marker = "extra == 'project3'", specifier = "==2.4.0" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.2.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/83/c9/466c0f9b42a0563366bb7c39906d9c6673315f81516f55e3a23a99f52234/sortedcontainers-2.2.0.tar.gz", hash = "sha256:331f5b7acb6bdfaf0b0646f5f86c087e414c9ae9d85e2076ad2eacb17ec2f4ff", size = 30402 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/75/4f79725a6ad966f1985d96c5aeda0b27d00c23afa14e8566efcdee1380ad/sortedcontainers-2.2.0-py2.py3-none-any.whl", hash = "sha256:f0694fbe8d090fab0fbabbfecad04756fbbb35dc3c0f89e0f6965396fe815d25", size = 29386 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.3.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = 
"https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + ] + "### + ); + }); + + Ok(()) +} + +/// This tests that extras don't conflict with one another when they are in +/// distinct groups of extras. +#[test] +fn lock_conflicting_extra_multiple_not_conflicting1() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + [ + { extra = "project3" }, + { extra = "project4" }, + ], + ] + + [project.optional-dependencies] + project1 = [] + project2 = [] + project3 = [] + project4 = [] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "###); + + // Install from the lockfile. + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + project==0.1.0 (from file://[TEMP_DIR]/) + "###); + // project1/project2 conflict! + uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project1").arg("--extra=project2"), + @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: The requested extras (`project1`, `project2`) are incompatible with the declared conflicting extra: {`project[project1]`, `project[project2]`} + "###); + // project3/project4 conflict! + uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project3").arg("--extra=project4"), + @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: The requested extras (`project3`, `project4`) are incompatible with the declared conflicting extra: {`project[project3]`, `project[project4]`} + "###); + // ... but project1/project3 does not. + uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project1").arg("--extra=project3"), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "###); + // ... and neither does project2/project3. + uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project2").arg("--extra=project3"), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "###); + // And similarly, with project 4. + uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project1").arg("--extra=project4"), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "###); + // ... and neither does project2/project3. 
+ uv_snapshot!( + context.filters(), + context.sync().arg("--frozen").arg("--extra=project2").arg("--extra=project4"), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Audited 1 package in [TIME] + "###); + + Ok(()) +} + +/// This tests that if the user has conflicting extras, but puts them in two +/// distinct groups of extras, then resolution still fails. (Because the only +/// way to resolve them in different forks is to define the extras as directly +/// conflicting.) +#[test] +fn lock_conflicting_extra_multiple_not_conflicting2() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["sortedcontainers==2.3.0"] + project4 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + // Fails, as expected. + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project4] depends on sortedcontainers==2.4.0 and project[project1] depends on sortedcontainers==2.3.0, we can conclude that project[project1] and project[project4] are incompatible. + And because your project requires project[project1] and project[project4], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // If we define project1/project2 as conflicting and project3/project4 + // as conflicting, that still isn't enough! That's because project1 + // conflicts with project4 and project2 conflicts with project3. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + [ + { extra = "project3" }, + { extra = "project4" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["sortedcontainers==2.3.0"] + project4 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project3] depends on sortedcontainers==2.3.0 and project[project2] depends on sortedcontainers==2.4.0, we can conclude that project[project2] and project[project3] are incompatible. + And because your project requires project[project2] and project[project3], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // One could try to declare all pairs of conflicting extras as + // conflicting, but this doesn't quite work either. For example, + // the first group of conflicting extra, project1/project2, + // specifically allows project4 to be co-mingled with project1 (and + // similarly, project3 with project2), which are conflicting. 
+ pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + [ + { extra = "project3" }, + { extra = "project4" }, + ], + [ + { extra = "project1" }, + { extra = "project4" }, + ], + [ + { extra = "project2" }, + { extra = "project3" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["sortedcontainers==2.3.0"] + project4 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + // We can also fix this by just putting them all in one big + // group, even though project1/project3 don't conflict and + // project2/project4 don't conflict. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + { extra = "project3" }, + { extra = "project4" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["sortedcontainers==2.3.0"] + project4 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + Ok(()) +} + +/// This tests that we handle two independent sets of conflicting +/// extras correctly. +#[test] +fn lock_conflicting_extra_multiple_independent() -> Result<()> { + let context = TestContext::new("3.12"); + + // If we don't declare any conflicting extras, then resolution + // will of course fail. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["anyio==4.1.0"] + project4 = ["anyio==4.2.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project4] depends on anyio==4.2.0 and project[project3] depends on anyio==4.1.0, we can conclude that project[project3] and project[project4] are incompatible. + And because your project requires project[project3] and project[project4], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // OK, responding to the error, we declare our anyio extras + // as conflicting. But now we should see sortedcontainers as + // conflicting. 
+ pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project3" }, + { extra = "project4" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["anyio==4.1.0"] + project4 = ["anyio==4.2.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project2] depends on sortedcontainers==2.4.0 and project[project1] depends on sortedcontainers==2.3.0, we can conclude that project[project1] and project[project2] are incompatible. + And because your project requires project[project1] and project[project2], we can conclude that your projects's requirements are unsatisfiable. + "###); + + // Once we declare ALL our conflicting extras, resolution succeeds. + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + [ + { extra = "project3" }, + { extra = "project4" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + project3 = ["anyio==4.1.0"] + project4 = ["anyio==4.2.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 7 packages in [TIME] + "###); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + resolution-markers = [ + ] + conflicting-groups = [[ + { package = "project", extra = "project1" }, + { package = "project", extra = "project2" }, + ], [ + { package = "project", extra = "project3" }, + { package = "project", extra = "project4" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "anyio" + version = "4.1.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/6e/57/075e07fb01ae2b740289ec9daec670f60c06f62d04b23a68077fd5d73fab/anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da", size = 155773 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/85/4f/d010eca6914703d8e6be222165d02c3e708ed909cdb2b7af3743667f302e/anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f", size = 83924 }, + ] + + [[package]] + name = "anyio" + version = "4.2.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = 
"https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f", size = 158770 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee", size = 85481 }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567 }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { editable = "." } + + [package.optional-dependencies] + project1 = [ + { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } }, + ] + project2 = [ + { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } }, + ] + project3 = [ + { name = "anyio", version = "4.1.0", source = { registry = "https://pypi.org/simple" } }, + ] + project4 = [ + { name = "anyio", version = "4.2.0", source = { registry = "https://pypi.org/simple" } }, + ] + + [package.metadata] + requires-dist = [ + { name = "anyio", marker = "extra == 'project3'", specifier = "==4.1.0" }, + { name = "anyio", marker = "extra == 'project4'", specifier = "==4.2.0" }, + { name = "sortedcontainers", marker = "extra == 'project1'", specifier = "==2.3.0" }, + { name = "sortedcontainers", marker = "extra == 'project2'", specifier = "==2.4.0" }, + ] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.3.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = 
"https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + ] + "### + ); + }); + + Ok(()) +} + +#[test] +fn lock_conflicting_extra_config_change_ignore_lockfile() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + ] + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + let lock = context.read("uv.lock"); + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + resolution-markers = [ + ] + conflicting-groups = [[ + { package = "project", extra = "project1" }, + { package = "project", extra = "project2" }, + ]] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + version = "0.1.0" + source = { editable = "." 
} + + [package.optional-dependencies] + project1 = [ + { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } }, + ] + project2 = [ + { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } }, + ] + + [package.metadata] + requires-dist = [ + { name = "sortedcontainers", marker = "extra == 'project1'", specifier = "==2.3.0" }, + { name = "sortedcontainers", marker = "extra == 'project2'", specifier = "==2.4.0" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.3.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479 }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + ] + sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + ] + "### + ); + }); + + // Re-run with `--locked` to check it's okay. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + // Now get rid of the conflicting group config, and check that `--locked` + // fails. + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + description = "Add your description here" + requires-python = ">=3.12" + + [project.optional-dependencies] + project1 = ["sortedcontainers==2.3.0"] + project2 = ["sortedcontainers==2.4.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + // Re-run with `--locked`, which should now fail because of + // the conflicting group config removal. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because project[project2] depends on sortedcontainers==2.4.0 and project[project1] depends on sortedcontainers==2.3.0, we can conclude that project[project1] and project[project2] are incompatible. + And because your project requires project[project1] and project[project2], we can conclude that your projects's requirements are unsatisfiable. + "###); + + Ok(()) +} + +/// This tests that we report an error when a requirement unconditionally +/// enables a conflicting extra. 
+#[test] +fn lock_conflicting_extra_unconditional() -> Result<()> { + let context = TestContext::new("3.12"); + + let root_pyproject_toml = context.temp_dir.child("pyproject.toml"); + root_pyproject_toml.write_str( + r#" + [project] + name = "dummy" + version = "0.1.0" + requires-python = "==3.12.*" + dependencies = [ + "proxy1[project1,project2]" + ] + + [tool.uv.workspace] + members = ["proxy1"] + + [tool.uv.sources] + proxy1 = { workspace = true } + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#, + )?; + + let proxy1_pyproject_toml = context.temp_dir.child("proxy1").child("pyproject.toml"); + proxy1_pyproject_toml.write_str( + r#" + [project] + name = "proxy1" + version = "0.1.0" + requires-python = "==3.12.*" + dependencies = [] + + [project.optional-dependencies] + project1 = ["anyio==4.1.0"] + project2 = ["anyio==4.2.0"] + + [tool.uv] + conflicting-groups = [ + [ + { extra = "project1" }, + { extra = "project2" }, + ], + ] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Found conflicting extra `project1` unconditionally enabled in `proxy1[project1,project2] @ file://[TEMP_DIR]/proxy1` + "###); + + // An error should occur even when only one conflicting extra is enabled. + root_pyproject_toml.write_str( + r#" + [project] + name = "dummy" + version = "0.1.0" + requires-python = "==3.12.*" + dependencies = [ + "proxy1[project1]" + ] + + [tool.uv.workspace] + members = ["proxy1"] + + [tool.uv.sources] + proxy1 = { workspace = true } + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Found conflicting extra `project1` unconditionally enabled in `proxy1[project1] @ file://[TEMP_DIR]/proxy1` + "###); + + // And same thing for the other extra. + root_pyproject_toml.write_str( + r#" + [project] + name = "dummy" + version = "0.1.0" + requires-python = "==3.12.*" + dependencies = [ + "proxy1[project2]" + ] + + [tool.uv.workspace] + members = ["proxy1"] + + [tool.uv.sources] + proxy1 = { workspace = true } + + [build-system] + requires = ["hatchling"] + build-backend = "hatchling.build" + + "#, + )?; + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Found conflicting extra `project2` unconditionally enabled in `proxy1[project2] @ file://[TEMP_DIR]/proxy1` + "###); + + Ok(()) +} + /// Show updated dependencies on `lock --upgrade`. 
#[test] fn lock_upgrade_log() -> Result<()> { diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index 92cd41356..17f3a7046 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -191,7 +191,7 @@ fn invalid_pyproject_toml_option_unknown_field() -> Result<()> { | 2 | unknown = "field" | ^^^^^^^ - unknown field `unknown`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `trusted-publishing`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dev-dependencies` + unknown field `unknown`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `trusted-publishing`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `conflicting-groups`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dev-dependencies` Resolved in [TIME] Audited in [TIME] diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index a435be824..65593d5b2 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -3031,6 +3031,102 @@ fn resolve_both() -> anyhow::Result<()> { Ok(()) } +/// Tests that errors when parsing `conflicting-groups` are reported. +#[test] +fn invalid_conflicting_groups() -> anyhow::Result<()> { + let context = TestContext::new("3.12"); + let pyproject = context.temp_dir.child("pyproject.toml"); + + // Write in `pyproject.toml` schema and test the singleton case. + pyproject.write_str(indoc::indoc! {r#" + [project] + name = "example" + version = "0.0.0" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [{extra = "dev"}], + ] + "#})?; + + // The file should be rejected for violating the schema. + uv_snapshot!(context.filters(), add_shared_args(context.lock()), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to parse: `pyproject.toml` + Caused by: TOML parse error at line 7, column 22 + | + 7 | conflicting-groups = [ + | ^ + Each set of conflicting groups must have at least two entries, but found only one + "### + ); + + // Now test the empty case. + pyproject.write_str(indoc::indoc! 
{r#" + [project] + name = "example" + version = "0.0.0" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [[]] + "#})?; + + // The file should be rejected for violating the schema. + uv_snapshot!(context.filters(), add_shared_args(context.lock()), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to parse: `pyproject.toml` + Caused by: TOML parse error at line 7, column 22 + | + 7 | conflicting-groups = [[]] + | ^^^^ + Each set of conflicting groups must have at least two entries, but found none + "### + ); + + Ok(()) +} + +/// Tests that valid `conflicting-groups` are parsed okay. +#[test] +fn valid_conflicting_groups() -> anyhow::Result<()> { + let context = TestContext::new("3.12"); + let pyproject = context.temp_dir.child("pyproject.toml"); + + // Write in `pyproject.toml` schema. + pyproject.write_str(indoc::indoc! {r#" + [project] + name = "example" + version = "0.0.0" + requires-python = ">=3.12" + + [tool.uv] + conflicting-groups = [ + [{extra = "x1"}, {extra = "x2"}], + ] + "#})?; + uv_snapshot!(context.filters(), add_shared_args(context.lock()), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "### + ); + + Ok(()) +} + /// Read from a `--config-file` command line argument. #[test] #[cfg_attr( @@ -3229,7 +3325,7 @@ fn resolve_config_file() -> anyhow::Result<()> { | 1 | [project] | ^^^^^^^ - unknown field `project`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `trusted-publishing`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dev-dependencies` + unknown field `project`, expected one of `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `publish-url`, `trusted-publishing`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `environments`, `conflicting-groups`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dev-dependencies` "### );