Add to wheel tag; add basic plumbing

This commit is contained in:
Charlie Marsh 2025-03-16 10:10:13 -04:00 committed by konstin
parent a9ea756d14
commit 2eca43f406
44 changed files with 1194 additions and 61 deletions

3
.gitignore vendored
View file

@ -1,3 +1,6 @@
# Vendored dependencies for the variant prototype.
/vendor/
# Insta snapshots.
*.pending-snap

62
Cargo.lock generated
View file

@ -2077,6 +2077,15 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "keccak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654"
dependencies = [
"cpufeatures",
]
[[package]]
name = "kurbo"
version = "0.8.3"
@ -3623,6 +3632,16 @@ dependencies = [
"digest",
]
[[package]]
name = "sha3"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60"
dependencies = [
"digest",
"keccak",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@ -4696,6 +4715,7 @@ dependencies = [
"uv-torch",
"uv-trampoline-builder",
"uv-types",
"uv-variants",
"uv-version",
"uv-virtualenv",
"uv-warnings",
@ -5106,6 +5126,7 @@ dependencies = [
"uv-python",
"uv-resolver",
"uv-types",
"uv-variant-frontend",
"uv-version",
"uv-workspace",
]
@ -5173,6 +5194,7 @@ dependencies = [
"uv-pep440",
"uv-platform-tags",
"uv-small-str",
"uv-variants",
]
[[package]]
@ -5210,6 +5232,7 @@ dependencies = [
"uv-pypi-types",
"uv-redacted",
"uv-small-str",
"uv-variants",
"uv-warnings",
"version-ranges",
]
@ -5551,6 +5574,7 @@ dependencies = [
"anyhow",
"hashbrown 0.15.4",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"jiff",
@ -5771,6 +5795,7 @@ dependencies = [
"uv-static",
"uv-torch",
"uv-types",
"uv-variants",
"uv-warnings",
"uv-workspace",
]
@ -5955,6 +5980,43 @@ dependencies = [
"uv-workspace",
]
[[package]]
name = "uv-variant-frontend"
version = "0.1.0"
dependencies = [
"anstream",
"fs-err 3.1.1",
"indoc",
"owo-colors",
"rustc-hash",
"serde_json",
"tempfile",
"thiserror 2.0.12",
"tokio",
"tracing",
"uv-configuration",
"uv-distribution-types",
"uv-fs",
"uv-pypi-types",
"uv-python",
"uv-static",
"uv-types",
"uv-variants",
"uv-virtualenv",
]
[[package]]
name = "uv-variants"
version = "0.1.0"
dependencies = [
"hex",
"itertools 0.14.0",
"rkyv",
"serde",
"sha3",
"thiserror 2.0.12",
]
[[package]]
name = "uv-version"
version = "0.7.18"

View file

@ -67,6 +67,8 @@ uv-tool = { path = "crates/uv-tool" }
uv-torch = { path = "crates/uv-torch" }
uv-trampoline-builder = { path = "crates/uv-trampoline-builder" }
uv-types = { path = "crates/uv-types" }
uv-variants = { path = "crates/uv-variants" }
uv-variant-frontend = { path = "crates/uv-variant-frontend" }
uv-version = { path = "crates/uv-version" }
uv-virtualenv = { path = "crates/uv-virtualenv" }
uv-warnings = { path = "crates/uv-warnings" }
@ -158,6 +160,7 @@ serde = { version = "1.0.210", features = ["derive", "rc"] }
serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
sha2 = { version = "0.10.8" }
sha3 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
spdx = { version = "0.10.6" }
syn = { version = "2.0.77" }

26
build-wheels.sh Executable file
View file

@ -0,0 +1,26 @@
#!/usr/bin/env bash
# build-wheels.sh
#
# Build wheels for the variant prototype's vendored dependencies:
#   - `provider_fictional_hw`: a fictional hardware provider package
#   - `variantlib`: a library for handling variant configurations
#
# Wheels are written to ./wheels; any existing wheels are removed first.
set -euxo pipefail

UV=./target/debug/uv

# Start from a clean output directory (this removes previously built wheels).
rm -rf wheels
mkdir wheels

# Build a wheel for each vendored project, in order.
for project in provider_fictional_hw variantlib; do
    "$UV" build --out-dir ./wheels --project "./vendor/${project}"
done

View file

@ -209,6 +209,7 @@ mod resolver {
interpreter.markers(),
conflicts,
Some(&TAGS),
None,
&flat_index,
&index,
&hashes,

View file

@ -33,6 +33,7 @@ uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-resolver = { workspace = true }
uv-types = { workspace = true }
uv-variant-frontend = { workspace = true }
uv-version = { workspace = true }
uv-workspace = { workspace = true }

View file

@ -29,7 +29,7 @@ use uv_distribution_types::{
};
use uv_git::GitResolver;
use uv_installer::{Installer, Plan, Planner, Preparer, SitePackages};
use uv_pypi_types::Conflicts;
use uv_pypi_types::{Conflicts, VariantProviderBackend};
use uv_python::{Interpreter, PythonEnvironment};
use uv_resolver::{
ExcludeNewer, FlatIndex, Flexibility, InMemoryIndex, Manifest, OptionsBuilder,
@ -39,6 +39,7 @@ use uv_types::{
AnyErrorBuild, BuildArena, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages,
HashStrategy, InFlight,
};
use uv_variant_frontend::VariantBuild;
use uv_workspace::WorkspaceCache;
#[derive(Debug, Error)]
@ -162,6 +163,24 @@ impl<'a> BuildDispatch<'a> {
.collect();
self
}
    /// Construct a [`VariantBuild`] for the given provider `backend`.
    ///
    /// Delegates to [`VariantBuild::setup`], forwarding this dispatch's
    /// interpreter, extra build environment variables, build output level,
    /// and build concurrency limit.
    ///
    /// # Errors
    ///
    /// Returns [`uv_variant_frontend::Error`] if the setup fails.
    pub async fn setup_variants(
        &self,
        backend: VariantProviderBackend,
        build_output: BuildOutput,
    ) -> Result<VariantBuild, uv_variant_frontend::Error> {
        let builder = VariantBuild::setup(
            backend,
            self.interpreter,
            self,
            self.build_extra_env_vars.clone(),
            build_output,
            self.concurrency.builds,
        )
        // NOTE(review): boxed, presumably to avoid an infinitely-sized future
        // (setup may itself trigger builds through this dispatch) — confirm.
        .boxed_local()
        .await?;
        Ok(builder)
    }
}
#[allow(refining_impl_trait)]
@ -235,6 +254,7 @@ impl BuildContext for BuildDispatch<'_> {
// Conflicting groups only make sense when doing universal resolution.
Conflicts::empty(),
Some(tags),
None,
self.flat_index,
&self.shared_state.index,
self.hasher,

View file

@ -21,6 +21,7 @@ uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-platform-tags = { workspace = true }
uv-small-str = { workspace = true }
uv-variants = { workspace = true }
memchr = { workspace = true }
rkyv = { workspace = true, features = ["smallvec-1"] }

View file

@ -101,6 +101,6 @@ mod tests {
#[test]
fn wheel_filename_size() {
assert_eq!(size_of::<WheelFilename>(), 48);
assert_eq!(size_of::<WheelFilename>(), 64);
}
}

View file

@ -8,6 +8,7 @@ Ok(
"foo",
),
version: "1.2.3",
variant: None,
tags: Large {
large: WheelTagLarge {
build_tag: Some(

View file

@ -8,6 +8,7 @@ Ok(
"foo",
),
version: "1.2.3",
variant: None,
tags: Large {
large: WheelTagLarge {
build_tag: None,

View file

@ -8,6 +8,7 @@ Ok(
"foo",
),
version: "1.2.3",
variant: None,
tags: Small {
small: WheelTagSmall {
python_tag: Python {

View file

@ -0,0 +1,36 @@
---
source: crates/uv-distribution-filename/src/wheel.rs
expression: "WheelFilename::from_str(\"dummy_project-0.0.1-~36266d4d~-py3-none-any.whl\")"
---
Ok(
WheelFilename {
name: PackageName(
"dummy-project",
),
version: "0.0.1",
variant: Some(
VariantTag {
data: [
51,
54,
50,
54,
54,
100,
52,
100,
],
},
),
tags: Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
},
)

View file

@ -1,3 +1,4 @@
use std::borrow::Cow;
use std::fmt::{Display, Formatter};
use std::hash::Hash;
use std::str::FromStr;
@ -6,6 +7,9 @@ use memchr::memchr;
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use thiserror::Error;
use crate::splitter::MemchrSplitter;
use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall};
use crate::{BuildTag, BuildTagError};
use uv_cache_key::cache_digest;
use uv_normalize::{InvalidNameError, PackageName};
use uv_pep440::{Version, VersionParseError};
@ -13,10 +17,7 @@ use uv_platform_tags::{
AbiTag, LanguageTag, ParseAbiTagError, ParseLanguageTagError, ParsePlatformTagError,
PlatformTag, TagCompatibility, Tags,
};
use crate::splitter::MemchrSplitter;
use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall};
use crate::{BuildTag, BuildTagError};
use uv_variants::VariantTag;
#[derive(
Debug,
@ -34,6 +35,7 @@ use crate::{BuildTag, BuildTagError};
pub struct WheelFilename {
pub name: PackageName,
pub version: Version,
variant: Option<VariantTag>,
tags: WheelTag,
}
@ -53,15 +55,26 @@ impl FromStr for WheelFilename {
impl Display for WheelFilename {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
if let Some(variant) = &self.variant {
write!(
f,
"{}-{}-~{}~-{}.whl",
self.name.as_dist_info_name(),
self.version,
variant,
self.tags
)
} else {
write!(
f,
"{}-{}-{}.whl",
self.name.as_dist_info_name(),
self.version,
self.tags,
self.tags
)
}
}
}
impl WheelFilename {
/// Create a [`WheelFilename`] from its components.
@ -75,6 +88,7 @@ impl WheelFilename {
Self {
name,
version,
variant: None,
tags: WheelTag::Small {
small: WheelTagSmall {
python_tag,
@ -97,6 +111,15 @@ impl WheelFilename {
/// The wheel filename without the extension.
pub fn stem(&self) -> String {
if let Some(variant) = &self.variant {
format!(
"{}-{}-~{}~-{}",
self.name.as_dist_info_name(),
self.version,
variant,
self.tags
)
} else {
format!(
"{}-{}-{}",
self.name.as_dist_info_name(),
@ -104,7 +127,7 @@ impl WheelFilename {
self.tags
)
}
}
/// Returns a consistent cache key with a maximum length of 64 characters.
///
/// Prefers `{version}-{tags}` if such an identifier fits within the maximum allowed length;
@ -112,14 +135,24 @@ impl WheelFilename {
pub fn cache_key(&self) -> String {
const CACHE_KEY_MAX_LEN: usize = 64;
let full = format!("{}-{}", self.version, self.tags);
// Include variant in the cache key if it exists
let full = if let Some(variant) = &self.variant {
format!("{}-{}-{}", self.version, variant, self.tags)
} else {
format!("{}-{}", self.version, self.tags)
};
if full.len() <= CACHE_KEY_MAX_LEN {
return full;
}
// Create a digest of the tag string (instead of its individual fields) to retain
// Create a digest of the tag string (and variant if it exists) to retain
// compatibility across platforms, Rust versions, etc.
let digest = cache_digest(&format!("{}", self.tags));
let digest = if let Some(variant) = &self.variant {
cache_digest(&format!("{}-{}", variant, self.tags))
} else {
cache_digest(&format!("{}", self.tags))
};
// Truncate the version, but avoid trailing dots, plus signs, etc. to avoid ambiguity.
let version_width = CACHE_KEY_MAX_LEN - 1 /* dash */ - 16 /* digest */;
@ -132,6 +165,11 @@ impl WheelFilename {
format!("{version}-{digest}")
}
/// Return the wheel's variant tag, if present.
pub fn variant(&self) -> Option<&VariantTag> {
self.variant.as_ref()
}
/// Return the wheel's Python tags.
pub fn python_tags(&self) -> &[LanguageTag] {
match &self.tags {
@ -180,6 +218,38 @@ impl WheelFilename {
///
/// The originating `filename` is used for high-fidelity error messages.
fn parse(stem: &str, filename: &str) -> Result<Self, WheelFilenameError> {
// Extract variant from filenames with the format, e.g., `dummy_project-0.0.1-~36266d4d~-py3-none-any.whl`.
// TODO(charlie): Integrate this into the filename parsing; it's just easier to do it upfront
// for now.
let mut variant: Option<VariantTag> = None;
let stem = if let Some(tilde_start) = stem.find("-~") {
if let Some(tilde_end) = stem[tilde_start + 1..].find("~-") {
// Skip the "-~".
let variant_start = tilde_start + 2;
// End before the "~-".
let variant_end = tilde_start + 1 + tilde_end;
// Validate that the variant is exactly 8 bytes.
let variant_str = &stem[variant_start..variant_end];
if variant_str.len() == 8 && variant_str.as_bytes().iter().all(|&b| b.is_ascii()) {
variant = Some(VariantTag::new(variant_str.to_string()));
} else {
return Err(WheelFilenameError::InvalidWheelFileName(
filename.to_string(),
format!("Variant must be exactly 8 ASCII characters, got: '{variant_str}'"),
));
}
// Create a new stem without the variant.
let before_variant = &stem[..tilde_start];
let after_variant = &stem[variant_end + 1..];
Cow::Owned(format!("{before_variant}{after_variant}"))
} else {
Cow::Borrowed(stem)
}
} else {
Cow::Borrowed(stem)
};
// The wheel filename should contain either five or six entries. If six, then the third
// entry is the build tag. If five, then the third entry is the Python tag.
// https://www.python.org/dev/peps/pep-0427/#file-name-convention
@ -294,6 +364,7 @@ impl WheelFilename {
Ok(Self {
name,
version,
variant,
tags,
})
}
@ -441,12 +512,20 @@ mod tests {
));
}
#[test]
fn ok_variant_tag() {
insta::assert_debug_snapshot!(WheelFilename::from_str(
"dummy_project-0.0.1-~36266d4d~-py3-none-any.whl"
));
}
#[test]
fn from_and_to_string() {
let wheel_names = &[
"django_allauth-0.51.0-py3-none-any.whl",
"osm2geojson-0.2.4-py3-none-any.whl",
"numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
"dummy_project-0.0.1-~36266d4d~-py3-none-any.whl",
];
for wheel_name in wheel_names {
assert_eq!(
@ -481,5 +560,10 @@ mod tests {
"example-1.2.3.4.5.6.7.8.9.0.1.2.3.4.5.6.7.8.9.0.1.2.1.2.3.4.5.6.7.8.9.0.1.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
).unwrap();
insta::assert_snapshot!(filename.cache_key(), @"1.2.3.4.5.6.7.8.9.0.1.2.3.4.5.6.7.8.9.0.1.2.1.2-80bf8598e9647cf7");
// Variant tags should be included in the cache key.
let filename =
WheelFilename::from_str("dummy_project-0.0.1-~36266d4d~-py3-none-any.whl").unwrap();
insta::assert_snapshot!(filename.cache_key(), @"0.0.1-36266d4d-py3-none-any");
}
}

View file

@ -29,6 +29,7 @@ uv-platform-tags = { workspace = true }
uv-pypi-types = { workspace = true }
uv-redacted = { workspace = true }
uv-small-str = { workspace = true }
uv-variants = { workspace = true }
uv-warnings = { workspace = true }
arcstr = { workspace = true }

View file

@ -9,6 +9,7 @@ use uv_pep440::VersionSpecifiers;
use uv_pep508::{MarkerExpression, MarkerOperator, MarkerTree, MarkerValueString};
use uv_platform_tags::{AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagPriority, Tags};
use uv_pypi_types::{HashDigest, Yanked};
use uv_variants::VariantPriority;
use crate::{
File, InstalledDist, KnownPlatform, RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist,
@ -117,6 +118,7 @@ impl IncompatibleDist {
match self {
Self::Wheel(incompatibility) => match incompatibility {
IncompatibleWheel::NoBinary => format!("has {self}"),
IncompatibleWheel::Variant => format!("has {self}"),
IncompatibleWheel::Tag(_) => format!("has {self}"),
IncompatibleWheel::Yanked(_) => format!("was {self}"),
IncompatibleWheel::ExcludeNewer(ts) => match ts {
@ -145,6 +147,7 @@ impl IncompatibleDist {
match self {
Self::Wheel(incompatibility) => match incompatibility {
IncompatibleWheel::NoBinary => format!("have {self}"),
IncompatibleWheel::Variant => format!("have {self}"),
IncompatibleWheel::Tag(_) => format!("have {self}"),
IncompatibleWheel::Yanked(_) => format!("were {self}"),
IncompatibleWheel::ExcludeNewer(ts) => match ts {
@ -193,6 +196,7 @@ impl IncompatibleDist {
Some(format!("(e.g., `{tag}`)", tag = tag.cyan()))
}
IncompatibleWheel::Tag(IncompatibleTag::Invalid) => None,
IncompatibleWheel::Variant => None,
IncompatibleWheel::NoBinary => None,
IncompatibleWheel::Yanked(..) => None,
IncompatibleWheel::ExcludeNewer(..) => None,
@ -210,6 +214,7 @@ impl Display for IncompatibleDist {
match self {
Self::Wheel(incompatibility) => match incompatibility {
IncompatibleWheel::NoBinary => f.write_str("no source distribution"),
IncompatibleWheel::Variant => f.write_str("no wheels with a matching variant"),
IncompatibleWheel::Tag(tag) => match tag {
IncompatibleTag::Invalid => f.write_str("no wheels with valid tags"),
IncompatibleTag::Python => {
@ -284,13 +289,20 @@ pub enum PythonRequirementKind {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum WheelCompatibility {
Incompatible(IncompatibleWheel),
Compatible(HashComparison, Option<TagPriority>, Option<BuildTag>),
Compatible(
HashComparison,
Option<TagPriority>,
Option<VariantPriority>,
Option<BuildTag>,
),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum IncompatibleWheel {
/// The wheel was published after the exclude newer time.
ExcludeNewer(Option<i64>),
/// The wheel variant does not match the target platform.
Variant,
/// The wheel tags do not match those of the target Python platform.
Tag(IncompatibleTag),
/// The required Python version is not a superset of the target Python version range.
@ -486,7 +498,7 @@ impl PrioritizedDist {
.best_wheel_index
.map(|i| &self.0.wheels[i])
.and_then(|(_, compatibility)| match compatibility {
WheelCompatibility::Compatible(_, _, _) => None,
WheelCompatibility::Compatible(..) => None,
WheelCompatibility::Incompatible(incompatibility) => Some(incompatibility),
})
}
@ -657,7 +669,7 @@ impl<'a> CompatibleDist<'a> {
impl WheelCompatibility {
/// Return `true` if the distribution is compatible.
pub fn is_compatible(&self) -> bool {
matches!(self, Self::Compatible(_, _, _))
matches!(self, Self::Compatible(..))
}
/// Return `true` if the distribution is excluded.
@ -671,14 +683,25 @@ impl WheelCompatibility {
/// Compatible wheel ordering is determined by tag priority.
pub fn is_more_compatible(&self, other: &Self) -> bool {
match (self, other) {
(Self::Compatible(_, _, _), Self::Incompatible(_)) => true,
(Self::Compatible(..), Self::Incompatible(..)) => true,
(
Self::Compatible(hash, tag_priority, build_tag),
Self::Compatible(other_hash, other_tag_priority, other_build_tag),
Self::Compatible(hash, tag_priority, variant_priority, build_tag),
Self::Compatible(
other_hash,
other_tag_priority,
other_variant_priority,
other_build_tag,
),
) => {
(hash, tag_priority, build_tag) > (other_hash, other_tag_priority, other_build_tag)
(hash, tag_priority, variant_priority, build_tag)
> (
other_hash,
other_tag_priority,
other_variant_priority,
other_build_tag,
)
}
(Self::Incompatible(_), Self::Compatible(_, _, _)) => false,
(Self::Incompatible(..), Self::Compatible(..)) => false,
(Self::Incompatible(incompatibility), Self::Incompatible(other_incompatibility)) => {
incompatibility.is_more_compatible(other_incompatibility)
}
@ -760,34 +783,45 @@ impl IncompatibleWheel {
Self::MissingPlatform(_)
| Self::NoBinary
| Self::RequiresPython(_, _)
| Self::Variant
| Self::Tag(_)
| Self::Yanked(_) => true,
},
Self::Variant => match other {
Self::ExcludeNewer(_)
| Self::Tag(_)
| Self::RequiresPython(_, _)
| Self::Yanked(_) => false,
Self::Variant => false,
Self::MissingPlatform(_) | Self::NoBinary => true,
},
Self::Tag(tag_self) => match other {
Self::ExcludeNewer(_) => false,
Self::Tag(tag_other) => tag_self > tag_other,
Self::MissingPlatform(_)
| Self::NoBinary
| Self::RequiresPython(_, _)
| Self::Variant
| Self::Yanked(_) => true,
},
Self::RequiresPython(_, _) => match other {
Self::ExcludeNewer(_) | Self::Tag(_) => false,
// Version specifiers cannot be reasonably compared
Self::RequiresPython(_, _) => false,
Self::MissingPlatform(_) | Self::NoBinary | Self::Yanked(_) => true,
Self::MissingPlatform(_) | Self::NoBinary | Self::Yanked(_) | Self::Variant => true,
},
Self::Yanked(_) => match other {
Self::ExcludeNewer(_) | Self::Tag(_) | Self::RequiresPython(_, _) => false,
// Yanks with a reason are more helpful for errors
Self::Yanked(yanked_other) => matches!(yanked_other, Yanked::Reason(_)),
Self::MissingPlatform(_) | Self::NoBinary => true,
Self::MissingPlatform(_) | Self::NoBinary | Self::Variant => true,
},
Self::NoBinary => match other {
Self::ExcludeNewer(_)
| Self::Tag(_)
| Self::RequiresPython(_, _)
| Self::Yanked(_) => false,
| Self::Yanked(_)
| Self::Variant => false,
Self::NoBinary => false,
Self::MissingPlatform(_) => true,
},

View file

@ -26,6 +26,7 @@ uv-small-str = { workspace = true }
hashbrown = { workspace = true }
indexmap = { workspace = true, features = ["serde"] }
indoc = { workspace = true }
itertools = { workspace = true }
jiff = { workspace = true, features = ["serde"] }
mailparse = { workspace = true }

View file

@ -10,6 +10,7 @@ pub use parsed_url::*;
pub use scheme::*;
pub use simple_json::*;
pub use supported_environments::*;
pub use variants::*;
mod base_url;
mod conflicts;
@ -23,3 +24,4 @@ mod parsed_url;
mod scheme;
mod simple_json;
mod supported_environments;
mod variants;

View file

@ -0,0 +1,31 @@
use indoc::formatdoc;
use crate::VerbatimParsedUrl;
/// A variant provider backend, as declared in configuration (e.g., the
/// `variant` entries deserialized from `uv.toml` / `pyproject.toml`).
#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct VariantProviderBackend {
    /// The provider backend string such as `fictional_tech.provider`.
    pub backend: String,
    /// The requirements that the backend requires (e.g., `["fictional_tech>=1.0"]`).
    pub requires: Vec<uv_pep508::Requirement<VerbatimParsedUrl>>,
}

impl VariantProviderBackend {
    /// Render a Python snippet that imports this backend as `backend`.
    ///
    /// The backend string may be either a plain module path (`pkg.module`,
    /// imported as a module) or a `module:object` pair (the named object is
    /// imported from the module). The preamble drops an empty first
    /// `sys.path` entry so the current directory is not used for resolution.
    pub fn import(&self) -> String {
        let import = if let Some((path, object)) = self.backend.split_once(':') {
            format!("from {path} import {object} as backend")
        } else {
            format!("import {} as backend", self.backend)
        };
        formatdoc! {r#"
            import sys
            if sys.path[0] == "":
                sys.path.pop(0)

            {import}
        "#}
    }
}

View file

@ -39,6 +39,7 @@ uv-small-str = { workspace = true }
uv-static = { workspace = true }
uv-torch = { workspace = true }
uv-types = { workspace = true }
uv-variants = { workspace = true }
uv-warnings = { workspace = true }
uv-workspace = { workspace = true }

View file

@ -17,6 +17,7 @@ use uv_pep440::Version;
use uv_platform_tags::{TagCompatibility, Tags};
use uv_pypi_types::HashDigest;
use uv_types::HashStrategy;
use uv_variants::{VariantCompatibility, VariantSet};
/// A set of [`PrioritizedDist`] from a `--find-links` entry, indexed by [`PackageName`]
/// and [`Version`].
@ -35,6 +36,7 @@ impl FlatIndex {
pub fn from_entries(
entries: FlatIndexEntries,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
hasher: &HashStrategy,
build_options: &BuildOptions,
) -> Self {
@ -46,6 +48,7 @@ impl FlatIndex {
entry.file,
entry.filename,
tags,
variants,
hasher,
build_options,
entry.index,
@ -81,6 +84,7 @@ impl FlatDistributions {
pub fn from_entries(
entries: Vec<FlatIndexEntry>,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
hasher: &HashStrategy,
build_options: &BuildOptions,
) -> Self {
@ -90,6 +94,7 @@ impl FlatDistributions {
entry.file,
entry.filename,
tags,
variants,
hasher,
build_options,
entry.index,
@ -114,6 +119,7 @@ impl FlatDistributions {
file: File,
filename: DistFilename,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
hasher: &HashStrategy,
build_options: &BuildOptions,
index: IndexUrl,
@ -128,6 +134,7 @@ impl FlatDistributions {
&filename,
file.hashes.as_slice(),
tags,
variants,
hasher,
build_options,
);
@ -205,6 +212,7 @@ impl FlatDistributions {
filename: &WheelFilename,
hashes: &[HashDigest],
tags: Option<&Tags>,
variants: Option<&VariantSet>,
hasher: &HashStrategy,
build_options: &BuildOptions,
) -> WheelCompatibility {
@ -214,7 +222,7 @@ impl FlatDistributions {
}
// Determine a compatibility for the wheel based on tags.
let priority = match tags {
let tag_priority = match tags {
Some(tags) => match filename.compatibility(tags) {
TagCompatibility::Incompatible(tag) => {
return WheelCompatibility::Incompatible(IncompatibleWheel::Tag(tag));
@ -224,6 +232,22 @@ impl FlatDistributions {
None => None,
};
// Determine a priority for the wheel based on variants.
let variant_priority = if let Some(variants) = variants {
if let Some(variant) = filename.variant() {
match variants.compatibility(variant) {
VariantCompatibility::Incompatible => {
return WheelCompatibility::Incompatible(IncompatibleWheel::Variant);
}
VariantCompatibility::Compatible(priority) => Some(priority),
}
} else {
None
}
} else {
None
};
// Check if hashes line up.
let hash = if let HashPolicy::Validate(required) =
hasher.get_package(&filename.name, &filename.version)
@ -242,7 +266,7 @@ impl FlatDistributions {
// Break ties with the build tag.
let build_tag = filename.build_tag().cloned();
WheelCompatibility::Compatible(hash, priority, build_tag)
WheelCompatibility::Compatible(hash, tag_priority, variant_priority, build_tag)
}
}

View file

@ -82,6 +82,7 @@ use crate::{
};
pub(crate) use provider::MetadataUnavailable;
use uv_torch::TorchStrategy;
use uv_variants::VariantSet;
mod availability;
mod batch_prefetch;
@ -168,6 +169,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider>
current_environment: &MarkerEnvironment,
conflicts: Conflicts,
tags: Option<&'a Tags>,
variants: Option<&'a VariantSet>,
flat_index: &'a FlatIndex,
index: &'a InMemoryIndex,
hasher: &'a HashStrategy,
@ -179,6 +181,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider>
database,
flat_index,
tags,
variants,
python_requirement.target(),
AllowedYanks::from_manifest(&manifest, &env, options.dependency_mode),
hasher,

View file

@ -11,6 +11,7 @@ use uv_normalize::PackageName;
use uv_pep440::{Version, VersionSpecifiers};
use uv_platform_tags::Tags;
use uv_types::{BuildContext, HashStrategy};
use uv_variants::VariantSet;
use crate::ExcludeNewer;
use crate::flat_index::FlatIndex;
@ -113,6 +114,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext> {
/// These are the entries from `--find-links` that act as overrides for index responses.
flat_index: FlatIndex,
tags: Option<Tags>,
variants: Option<VariantSet>,
requires_python: RequiresPython,
allowed_yanks: AllowedYanks,
hasher: HashStrategy,
@ -127,6 +129,7 @@ impl<'a, Context: BuildContext> DefaultResolverProvider<'a, Context> {
fetcher: DistributionDatabase<'a, Context>,
flat_index: &'a FlatIndex,
tags: Option<&'a Tags>,
variants: Option<&'a VariantSet>,
requires_python: &'a RequiresPython,
allowed_yanks: AllowedYanks,
hasher: &'a HashStrategy,
@ -138,6 +141,7 @@ impl<'a, Context: BuildContext> DefaultResolverProvider<'a, Context> {
fetcher,
flat_index: flat_index.clone(),
tags: tags.cloned(),
variants: variants.cloned(),
requires_python: requires_python.clone(),
allowed_yanks,
hasher: hasher.clone(),
@ -181,6 +185,7 @@ impl<Context: BuildContext> ResolverProvider for DefaultResolverProvider<'_, Con
package_name,
index,
self.tags.as_ref(),
self.variants.as_ref(),
&self.requires_python,
&self.allowed_yanks,
&self.hasher,
@ -193,6 +198,7 @@ impl<Context: BuildContext> ResolverProvider for DefaultResolverProvider<'_, Con
MetadataFormat::Flat(metadata) => VersionMap::from_flat_metadata(
metadata,
self.tags.as_ref(),
self.variants.as_ref(),
&self.hasher,
self.build_options,
),

View file

@ -19,6 +19,7 @@ use uv_pep440::Version;
use uv_platform_tags::{IncompatibleTag, TagCompatibility, Tags};
use uv_pypi_types::{HashDigest, Yanked};
use uv_types::HashStrategy;
use uv_variants::{VariantCompatibility, VariantSet};
use uv_warnings::warn_user_once;
use crate::flat_index::FlatDistributions;
@ -47,6 +48,7 @@ impl VersionMap {
package_name: &PackageName,
index: &IndexUrl,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
requires_python: &RequiresPython,
allowed_yanks: &AllowedYanks,
hasher: &HashStrategy,
@ -109,6 +111,7 @@ impl VersionMap {
no_build: build_options.no_build_package(package_name),
index: index.clone(),
tags: tags.cloned(),
variants: variants.cloned(),
allowed_yanks: allowed_yanks.clone(),
hasher: hasher.clone(),
requires_python: requires_python.clone(),
@ -121,6 +124,7 @@ impl VersionMap {
pub(crate) fn from_flat_metadata(
flat_metadata: Vec<FlatIndexEntry>,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
hasher: &HashStrategy,
build_options: &BuildOptions,
) -> Self {
@ -129,7 +133,7 @@ impl VersionMap {
let mut map = BTreeMap::new();
for (version, prioritized_dist) in
FlatDistributions::from_entries(flat_metadata, tags, hasher, build_options)
FlatDistributions::from_entries(flat_metadata, tags, variants, hasher, build_options)
{
stable |= version.is_stable();
local |= version.is_local();
@ -364,6 +368,8 @@ struct VersionMapLazy {
/// The set of compatibility tags that determines whether a wheel is usable
/// in the current environment.
tags: Option<Tags>,
/// The set of active variants in the environment.
variants: Option<VariantSet>,
/// Whether files newer than this timestamp should be excluded or not.
exclude_newer: Option<ExcludeNewer>,
/// Which yanked versions are allowed
@ -564,8 +570,8 @@ impl VersionMapLazy {
}
}
// Determine a compatibility for the wheel based on tags.
let priority = if let Some(tags) = &self.tags {
// Determine a priority for the wheel based on tags.
let tag_priority = if let Some(tags) = &self.tags {
match filename.compatibility(tags) {
TagCompatibility::Incompatible(tag) => {
return WheelCompatibility::Incompatible(IncompatibleWheel::Tag(tag));
@ -583,6 +589,22 @@ impl VersionMapLazy {
None
};
// Determine a priority for the wheel based on variants.
let variant_priority = if let Some(variants) = &self.variants {
if let Some(variant) = filename.variant() {
match variants.compatibility(variant) {
VariantCompatibility::Incompatible => {
return WheelCompatibility::Incompatible(IncompatibleWheel::Variant);
}
VariantCompatibility::Compatible(priority) => Some(priority),
}
} else {
None
}
} else {
None
};
// Check if hashes line up. If hashes aren't required, they're considered matching.
let hash_policy = self.hasher.get_package(name, version);
let required_hashes = hash_policy.digests();
@ -601,7 +623,7 @@ impl VersionMapLazy {
// Break ties with the build tag.
let build_tag = filename.build_tag().cloned();
WheelCompatibility::Compatible(hash, priority, build_tag)
WheelCompatibility::Compatible(hash, tag_priority, variant_priority, build_tag)
}
}

View file

@ -14,7 +14,7 @@ use uv_install_wheel::LinkMode;
use uv_macros::{CombineOptions, OptionsMetadata};
use uv_normalize::{ExtraName, PackageName, PipGroupName};
use uv_pep508::Requirement;
use uv_pypi_types::{SupportedEnvironments, VerbatimParsedUrl};
use uv_pypi_types::{SupportedEnvironments, VariantProviderBackend, VerbatimParsedUrl};
use uv_python::{PythonDownloads, PythonPreference, PythonVersion};
use uv_redacted::DisplaySafeUrl;
use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode};
@ -105,6 +105,9 @@ pub struct Options {
)]
cache_keys: Option<Vec<CacheKey>>,
#[cfg_attr(feature = "schemars", schemars(skip))]
pub variant: Option<Vec<VariantProviderBackend>>,
// NOTE(charlie): These fields are shared with `ToolUv` in
// `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct.
// They're respected in both `pyproject.toml` and `uv.toml` files.
@ -1856,6 +1859,8 @@ pub struct OptionsWire {
pip: Option<PipOptions>,
cache_keys: Option<Vec<CacheKey>>,
variant: Option<Vec<VariantProviderBackend>>,
// NOTE(charlie): These fields are shared with `ToolUv` in
// `crates/uv-workspace/src/pyproject.rs`. The documentation lives on that struct.
// They're respected in both `pyproject.toml` and `uv.toml` files.
@ -1927,6 +1932,7 @@ impl From<OptionsWire> for Options {
no_binary_package,
pip,
cache_keys,
variant,
override_dependencies,
constraint_dependencies,
build_constraint_dependencies,
@ -1995,6 +2001,7 @@ impl From<OptionsWire> for Options {
pip,
cache_keys,
build_backend,
variant,
override_dependencies,
constraint_dependencies,
build_constraint_dependencies,

View file

@ -0,0 +1,35 @@
[package]
name = "uv-variant-frontend"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
uv-configuration = { workspace = true }
uv-distribution-types = { workspace = true }
uv-fs = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-static = { workspace = true }
uv-types = { workspace = true }
uv-virtualenv = { workspace = true }
uv-variants = { workspace = true }
anstream = { workspace = true }
fs-err = { workspace = true }
indoc = { workspace = true }
owo-colors = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }

View file

@ -0,0 +1,97 @@
use std::env;
use std::fmt::{Display, Formatter};
use std::io;
use std::path::PathBuf;
use std::process::ExitStatus;
use owo_colors::OwoColorize;
use thiserror::Error;
use tracing::error;
use uv_configuration::BuildOutput;
use uv_types::AnyErrorBuild;
use crate::PythonRunnerOutput;
/// Errors that can occur while setting up or invoking a variant provider.
#[derive(Error, Debug)]
pub enum Error {
    /// An underlying I/O failure (e.g. creating the temporary directory).
    #[error(transparent)]
    Io(#[from] io::Error),
    /// Resolving the provider's requirements failed; the `&'static str` names
    /// the requirements source (e.g. "`build-system.requires`").
    #[error("Failed to resolve requirements from {0}")]
    RequirementsResolve(&'static str, #[source] AnyErrorBuild),
    /// Installing the resolved provider requirements into the venv failed.
    #[error("Failed to install requirements from {0}")]
    RequirementsInstall(&'static str, #[source] AnyErrorBuild),
    /// Creating the temporary virtual environment failed.
    #[error("Failed to create temporary virtualenv")]
    Virtualenv(#[from] uv_virtualenv::Error),
    /// Spawning or communicating with the subprocess at the given path failed.
    #[error("Failed to run `{0}`")]
    CommandFailed(PathBuf, #[source] io::Error),
    /// The provider backend itself reported a failure (non-zero exit).
    #[error("The build backend returned an error")]
    ProviderBackend(#[from] ProviderBackendError),
    /// Joining PATH entries produced an invalid value (e.g. embedded separator).
    #[error("Failed to build PATH for build script")]
    BuildScriptPath(#[source] env::JoinPathsError),
}
/// A captured failure from the provider backend subprocess, including any
/// output collected from it. `Display` is implemented manually below.
#[derive(Debug, Error)]
pub struct ProviderBackendError {
    /// Human-readable description of what failed.
    message: String,
    /// Exit status of the provider subprocess.
    exit_code: ExitStatus,
    /// Captured standard output, one entry per line (may be empty).
    stdout: Vec<String>,
    /// Captured standard error, one entry per line (may be empty).
    stderr: Vec<String>,
}
impl Display for ProviderBackendError {
    /// Render the failure as `message (exit status)`, followed by any captured
    /// `[stdout]`/`[stderr]` sections and a closing hint for the user.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{} ({})", self.message, self.exit_code)?;

        // Only show a section when it contains something other than whitespace.
        let has_stdout = self.stdout.iter().any(|line| !line.trim().is_empty());
        let has_stderr = self.stderr.iter().any(|line| !line.trim().is_empty());

        if has_stdout {
            write!(f, "\n\n{}\n{}", "[stdout]".red(), self.stdout.join("\n"))?;
        }
        if has_stderr {
            write!(f, "\n\n{}\n{}", "[stderr]".red(), self.stderr.join("\n"))?;
        }

        // Separate the captured output from the hint with a blank line.
        if has_stdout || has_stderr {
            writeln!(f)?;
        }

        write!(
            f,
            "\n{}{} This usually indicates a problem with the package or the build environment.",
            "hint".bold().cyan(),
            ":".bold()
        )
    }
}
impl Error {
    /// Construct an [`Error`] from the output of a failed command.
    pub(crate) fn from_command_output(
        message: String,
        output: &PythonRunnerOutput,
        level: BuildOutput,
    ) -> Self {
        // Only `Debug` retains the captured output: in `Stderr` mode it was
        // already forwarded live, and `Quiet` suppresses it entirely.
        let (stdout, stderr) = match level {
            BuildOutput::Stderr | BuildOutput::Quiet => (vec![], vec![]),
            BuildOutput::Debug => (output.stdout.clone(), output.stderr.clone()),
        };
        Self::ProviderBackend(ProviderBackendError {
            message,
            exit_code: output.status,
            stdout,
            stderr,
        })
    }
}

View file

@ -0,0 +1,328 @@
//! Detect compatible variants from a variant provider.
mod error;
use std::ffi::OsString;
use std::fmt::Write;
use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
use std::{env, iter};
use fs_err as fs;
use indoc::formatdoc;
use rustc_hash::FxHashMap;
use tempfile::TempDir;
use tokio::io::AsyncBufReadExt;
use tokio::process::Command;
use tokio::sync::Semaphore;
use tracing::debug;
use uv_configuration::{BuildOutput, PreviewMode};
use uv_distribution_types::Requirement;
use uv_fs::{PythonExt, Simplified};
use uv_pypi_types::VariantProviderBackend;
use uv_python::{Interpreter, PythonEnvironment};
use uv_static::EnvVars;
use uv_types::{BuildContext, BuildStack};
use uv_variants::VariantProviderConfig;
pub use crate::error::Error;
/// A frontend for running a variant provider backend in a dedicated virtual
/// environment and collecting its reported configuration.
pub struct VariantBuild {
    /// Temporary directory hosting the provider's virtualenv; also used as
    /// scratch space for the hook's `output.json`. Held here so it is not
    /// deleted before the build completes.
    temp_dir: TempDir,
    /// The backend to use.
    backend: VariantProviderBackend,
    /// The virtual environment in which to run the provider hook.
    venv: PythonEnvironment,
    /// Whether to send build output to `stderr` or `tracing`, etc.
    level: BuildOutput,
    /// Modified PATH that contains the `venv_bin`, `user_path` and `system_path` variables in that
    /// order.
    modified_path: OsString,
    /// Environment variables to be passed in.
    environment_variables: FxHashMap<OsString, OsString>,
    /// Runner for Python scripts.
    runner: PythonRunner,
}
impl VariantBuild {
    /// Create a virtual environment in which to run a variant provider.
    ///
    /// Resolves and installs the backend's `requires` into a fresh venv, and
    /// pre-computes the `PATH` the provider subprocess will see
    /// (venv scripts dir, then any user-supplied `PATH`, then the OS `PATH`).
    pub async fn setup(
        backend: VariantProviderBackend,
        interpreter: &Interpreter,
        build_context: &impl BuildContext,
        mut environment_variables: FxHashMap<OsString, OsString>,
        level: BuildOutput,
        concurrent_builds: usize,
    ) -> Result<Self, Error> {
        let temp_dir = build_context.cache().venv_dir()?;

        // Create a virtual environment.
        // NOTE(review): the five boolean flags mirror `create_venv`'s
        // signature; see `uv_virtualenv::create_venv` for their meaning.
        let venv = uv_virtualenv::create_venv(
            temp_dir.path(),
            interpreter.clone(),
            uv_virtualenv::Prompt::None,
            false,
            false,
            false,
            false,
            false,
            PreviewMode::Disabled, // TODO(konsti)
        )?;

        // Resolve and install the provider requirements.
        let requirements = backend
            .requires
            .iter()
            .cloned()
            .map(Requirement::from)
            .collect::<Vec<_>>();
        let resolved_requirements = build_context
            .resolve(&requirements, &BuildStack::empty())
            .await
            .map_err(|err| Error::RequirementsResolve("`build-system.requires`", err.into()))?;
        build_context
            .install(&resolved_requirements, &venv, &BuildStack::empty())
            .await
            .map_err(|err| Error::RequirementsInstall("`build-system.requires`", err.into()))?;

        // Figure out what the modified path should be, and remove the PATH variable from the
        // environment variables if it's there.
        let user_path = environment_variables.remove(&OsString::from(EnvVars::PATH));
        // See if there is an OS PATH variable.
        let os_path = env::var_os(EnvVars::PATH);

        // Prepend the user supplied PATH to the existing OS PATH.
        let modified_path = if let Some(user_path) = user_path {
            match os_path {
                // Prepend the user supplied PATH to the existing PATH.
                Some(env_path) => {
                    let user_path = PathBuf::from(user_path);
                    let new_path = env::split_paths(&user_path).chain(env::split_paths(&env_path));
                    Some(env::join_paths(new_path).map_err(Error::BuildScriptPath)?)
                }
                // Use the user supplied PATH.
                None => Some(user_path),
            }
        } else {
            os_path
        };

        // Prepend the venv bin directory to the modified path.
        let modified_path = if let Some(path) = modified_path {
            let venv_path = iter::once(venv.scripts().to_path_buf()).chain(env::split_paths(&path));
            env::join_paths(venv_path).map_err(Error::BuildScriptPath)?
        } else {
            OsString::from(venv.scripts())
        };

        let runner = PythonRunner::new(concurrent_builds, level);
        Ok(Self {
            temp_dir,
            backend,
            venv,
            level,
            modified_path,
            environment_variables,
            runner,
        })
    }

    /// Run a variant provider to infer compatible variants.
    ///
    /// Invokes the backend's entry point in the venv and deserializes the JSON
    /// it writes as a [`VariantProviderConfig`].
    pub async fn build(&self) -> Result<VariantProviderConfig, Error> {
        // Write the hook output to a file so that we can read it back reliably.
        let outfile = self.temp_dir.path().join("output.json");

        // Construct the appropriate build script based on the build kind.
        // `self.backend.import()` brings `backend` into scope; the hook's
        // return value is serialized to `outfile` as JSON.
        let script = formatdoc! {
            r#"
            {}
            with open("{}", "w") as fp:
                import json
                fp.write(json.dumps(backend()))
            "#,
            self.backend.import(),
            outfile.escape_for_python()
        };

        let output = self
            .runner
            .run_script(
                &self.venv,
                &script,
                self.temp_dir.path(),
                &self.environment_variables,
                &self.modified_path,
            )
            .await?;
        if !output.status.success() {
            return Err(Error::from_command_output(
                format!(
                    "Call to variant backend failed in `{}`",
                    self.backend.backend
                ),
                &output,
                self.level,
            ));
        }

        // Read as JSON.
        let json = fs::read(&outfile).map_err(|err| {
            Error::CommandFailed(self.venv.python_executable().to_path_buf(), err)
        })?;
        let config = serde_json::from_slice::<VariantProviderConfig>(&json).map_err(|err| {
            Error::CommandFailed(self.venv.python_executable().to_path_buf(), err.into())
        })?;

        Ok(config)
    }
}
/// A runner that manages the execution of external python processes with a
/// concurrency limit.
#[derive(Debug)]
struct PythonRunner {
    /// Semaphore bounding the number of concurrently running scripts.
    control: Semaphore,
    /// Where the subprocess output is forwarded (stderr, tracing, or hidden).
    level: BuildOutput,
}
/// The captured result of one Python script invocation.
#[derive(Debug)]
struct PythonRunnerOutput {
    /// Captured standard output, one entry per line.
    stdout: Vec<String>,
    /// Captured standard error, one entry per line.
    stderr: Vec<String>,
    /// Exit status of the script process.
    status: ExitStatus,
}
impl PythonRunner {
    /// Create a `PythonRunner` with the provided concurrency limit and output level.
    fn new(concurrency: usize, level: BuildOutput) -> Self {
        Self {
            control: Semaphore::new(concurrency),
            level,
        }
    }

    /// Spawn a process that runs a python script in the provided environment.
    ///
    /// If the concurrency limit has been reached this method will wait until a pending
    /// script completes before spawning this one.
    ///
    /// Note: It is the caller's responsibility to create an informative span.
    async fn run_script(
        &self,
        venv: &PythonEnvironment,
        script: &str,
        source_tree: &Path,
        environment_variables: &FxHashMap<OsString, OsString>,
        modified_path: &OsString,
    ) -> Result<PythonRunnerOutput, Error> {
        /// Read lines from a reader and store them in a buffer.
        ///
        /// Each line is also forwarded to `printer` as it arrives.
        async fn read_from(
            mut reader: tokio::io::Split<tokio::io::BufReader<impl tokio::io::AsyncRead + Unpin>>,
            mut printer: Printer,
            buffer: &mut Vec<String>,
        ) -> io::Result<()> {
            loop {
                match reader.next_segment().await? {
                    Some(line_buf) => {
                        // `split(b'\n')` already removed the `\n`; strip a
                        // trailing `\r` too so CRLF output is normalized.
                        let line_buf = line_buf.strip_suffix(b"\r").unwrap_or(&line_buf);
                        let line = String::from_utf8_lossy(line_buf).into();
                        let _ = write!(printer, "{line}");
                        buffer.push(line);
                    }
                    None => return Ok(()),
                }
            }
        }

        // Wait for a concurrency permit. The semaphore is never closed, so
        // `acquire` cannot fail here; the permit is released on drop.
        let _permit = self.control.acquire().await.unwrap();

        let mut child = Command::new(venv.python_executable())
            .args(["-c", script])
            .current_dir(source_tree.simplified())
            .envs(environment_variables)
            .env(EnvVars::PATH, modified_path)
            .env(EnvVars::VIRTUAL_ENV, venv.root())
            .env(EnvVars::CLICOLOR_FORCE, "1")
            .env(EnvVars::PYTHONIOENCODING, "utf-8:backslashreplace")
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .spawn()
            .map_err(|err| Error::CommandFailed(venv.python_executable().to_path_buf(), err))?;

        // Create buffers to capture `stdout` and `stderr`.
        let mut stdout_buf = Vec::with_capacity(1024);
        let mut stderr_buf = Vec::with_capacity(1024);

        // Create separate readers for `stdout` and `stderr`.
        // The pipes were requested above, so `take()` cannot return `None`.
        let stdout_reader = tokio::io::BufReader::new(child.stdout.take().unwrap()).split(b'\n');
        let stderr_reader = tokio::io::BufReader::new(child.stderr.take().unwrap()).split(b'\n');

        // Asynchronously read from the in-memory pipes.
        let printer = Printer::from(self.level);
        let result = tokio::join!(
            read_from(stdout_reader, printer, &mut stdout_buf),
            read_from(stderr_reader, printer, &mut stderr_buf),
        );
        match result {
            (Ok(()), Ok(())) => {}
            (Err(err), _) | (_, Err(err)) => {
                return Err(Error::CommandFailed(
                    venv.python_executable().to_path_buf(),
                    err,
                ));
            }
        }

        // Wait for the child process to finish.
        let status = child
            .wait()
            .await
            .map_err(|err| Error::CommandFailed(venv.python_executable().to_path_buf(), err))?;

        Ok(PythonRunnerOutput {
            stdout: stdout_buf,
            stderr: stderr_buf,
            status,
        })
    }
}
/// Where provider subprocess output is forwarded while it is being captured.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Printer {
    /// Send the provider output to `stderr`.
    Stderr,
    /// Send the provider output to `tracing`.
    Debug,
    /// Hide the provider output.
    Quiet,
}
impl From<BuildOutput> for Printer {
    /// One-to-one mapping from the configured output level to a printer.
    fn from(output: BuildOutput) -> Self {
        match output {
            BuildOutput::Stderr => Self::Stderr,
            BuildOutput::Debug => Self::Debug,
            BuildOutput::Quiet => Self::Quiet,
        }
    }
}
impl Write for Printer {
    /// Forward one already-complete line to the configured destination.
    fn write_str(&mut self, s: &str) -> std::fmt::Result {
        match self {
            Self::Stderr => anstream::eprintln!("{s}"),
            Self::Debug => debug!("{s}"),
            Self::Quiet => {}
        }
        Ok(())
    }
}

View file

@ -0,0 +1,21 @@
# Package manifest for `uv-variants`: core types for wheel variant tags,
# descriptions, and priority sets.
[package]
name = "uv-variants"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[dependencies]
# External crates (versions pinned at the workspace root).
hex = { workspace = true }
itertools = { workspace = true }
rkyv = { workspace = true }
serde = { workspace = true }
sha3 = { workspace = true }
thiserror = { workspace = true }

[lints]
workspace = true

View file

@ -0,0 +1,207 @@
use std::cmp;
use std::collections::HashMap;
use std::num::NonZeroU32;
use std::sync::Arc;
use itertools::Itertools;
use sha3::{
digest::{ExtendableOutput, Update},
Shake128,
};
/// A wheel-tag component identifying one variant; produced as a hex digest by
/// [`VariantDescription::tag`].
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct VariantTag {
    // The tag's textual form (e.g. an 8-character hex digest).
    data: String,
}

impl VariantTag {
    /// Wrap a pre-computed tag string.
    pub fn new(data: String) -> Self {
        Self { data }
    }
}
impl std::fmt::Display for VariantTag {
    /// Render the tag exactly as its underlying string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.data)
    }
}
/// Errors produced while constructing variant priorities.
#[derive(Debug, thiserror::Error)]
pub enum VariantError {
    /// The zero-based index could not be converted into a `NonZeroU32` priority.
    #[error("Invalid priority: `{0}`")]
    InvalidPriority(usize, #[source] std::num::TryFromIntError),
}
/// In `variantlib`, this is called [`VariantKeyConfig`].
///
/// A single configuration key together with every value the provider reports
/// for it.
#[derive(Debug, Clone, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)]
pub struct VariantKeyConfig {
    /// The configuration key name.
    key: String,
    /// All values reported for this key.
    values: Vec<String>,
}
/// In `variantlib`, this is called [`VariantProviderConfig`].
///
/// The full configuration returned by a single provider backend: the provider
/// name plus its supported keys and values. Deserialized from the provider
/// hook's JSON output.
#[derive(Debug, Clone, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)]
pub struct VariantProviderConfig {
    /// The provider's name.
    pub provider: String,
    /// The provider's supported configuration keys and values.
    pub configs: Vec<VariantKeyConfig>,
}
/// In `variantlib`, this is called [`VariantMeta`].
///
/// One concrete `(provider, key, value)` assignment.
#[derive(Debug, Clone, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)]
pub struct VariantMeta {
    /// The provider that reported this assignment.
    provider: String,
    /// The configuration key.
    key: String,
    /// The selected value for the key.
    value: String,
}
/// In `variantlib`, this is called [`VariantDescription`].
///
/// A set of `(provider, key, value)` assignments that together describe one
/// variant; hashed into a [`VariantTag`] by [`VariantDescription::tag`].
#[derive(Default, Debug, Clone)]
pub struct VariantDescription {
    // The assignments, kept sorted by (provider, key) by `get_combinations`.
    data: Vec<VariantMeta>,
}
impl VariantDescription {
    /// Compute the variant tag: the first `HASH_LENGTH` hex characters of a
    /// SHAKE-128 digest over the concatenated `provider :: key :: value`
    /// triples.
    pub fn tag(&self) -> VariantTag {
        const HASH_LENGTH: usize = 8;

        // Feed every triple into the hasher, piece by piece.
        let mut hasher = Shake128::default();
        for entry in &self.data {
            for part in [
                entry.provider.as_str(),
                " :: ",
                entry.key.as_str(),
                " :: ",
                entry.value.as_str(),
            ] {
                hasher.update(part.as_bytes());
            }
        }

        // Each digest byte renders as two hex characters.
        let digest = hasher.finalize_boxed(HASH_LENGTH / 2);
        VariantTag::new(hex::encode(digest))
    }
}
/// The set of variant tags known to be compatible, each mapped to its
/// priority. Cheap to clone: the underlying map is shared via `Arc`.
#[derive(Default, Debug, Clone)]
pub struct VariantSet {
    map: Arc<HashMap<VariantTag, VariantPriority>>,
}
impl VariantSet {
    /// Build a set from descriptions ordered from lowest to highest priority
    /// (later entries receive higher priority; duplicate tags keep the later,
    /// higher priority).
    pub fn new(data: &[VariantDescription]) -> Result<Self, VariantError> {
        let map = data
            .iter()
            .enumerate()
            .map(|(rank, description)| Ok((description.tag(), VariantPriority::try_from(rank)?)))
            .collect::<Result<HashMap<_, _>, VariantError>>()?;
        Ok(Self { map: Arc::new(map) })
    }

    /// Look up the compatibility (and priority) of the given tag.
    pub fn compatibility(&self, variant: &VariantTag) -> VariantCompatibility {
        match self.map.get(variant) {
            Some(&priority) => VariantCompatibility::Compatible(priority),
            None => VariantCompatibility::Incompatible,
        }
    }
}
/// Whether a variant tag is compatible with the current environment, and if
/// so, at which priority.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum VariantCompatibility {
    /// The tag is not in the compatible set.
    Incompatible,
    /// The tag is compatible, with the given priority.
    Compatible(VariantPriority),
}
impl Ord for VariantCompatibility {
    /// Any compatible tag outranks an incompatible one; two compatible tags
    /// compare by their priorities.
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        match (self, other) {
            (Self::Incompatible, Self::Incompatible) => cmp::Ordering::Equal,
            (Self::Incompatible, Self::Compatible(_)) => cmp::Ordering::Less,
            (Self::Compatible(_), Self::Incompatible) => cmp::Ordering::Greater,
            (Self::Compatible(lhs), Self::Compatible(rhs)) => lhs.cmp(rhs),
        }
    }
}
impl PartialOrd for VariantCompatibility {
    /// Delegate to the total order defined by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl VariantCompatibility {
    /// Returns `true` if the tag is compatible.
    pub fn is_compatible(&self) -> bool {
        matches!(self, Self::Compatible(_))
    }
}
/// The priority of a variant.
///
/// A wrapper around [`NonZeroU32`]. Higher values indicate higher priority.
/// The `NonZeroU32` niche makes `Option<VariantPriority>` the same size as
/// `u32`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct VariantPriority(NonZeroU32);
impl TryFrom<usize> for VariantPriority {
    type Error = VariantError;

    /// Create a [`VariantPriority`] from a `usize`, where higher `usize` values are given higher
    /// priority.
    ///
    /// # Errors
    ///
    /// Returns [`VariantError::InvalidPriority`] if `priority + 1` does not fit in a `u32`.
    fn try_from(priority: usize) -> Result<Self, VariantError> {
        // Perform the `+ 1` shift in `u128` so it cannot overflow before the
        // range check. (The previous `1 + priority` on `u32` overflowed for
        // `priority == u32::MAX as usize`: a panic in debug builds, a wrap to
        // zero in release builds.)
        match u32::try_from(priority as u128 + 1).and_then(NonZeroU32::try_from) {
            Ok(priority) => Ok(Self(priority)),
            Err(err) => Err(VariantError::InvalidPriority(priority, err)),
        }
    }
}
/// Return all possible combinations based on the given [`VariantProviderConfig`] entities.
///
/// Flattens every provider's keys into groups of `(provider, key, value)`
/// assignments, then emits the cartesian product of every subset of groups,
/// largest subsets first. Each emitted description is sorted by
/// `(provider, key)`.
pub fn get_combinations(data: Vec<VariantProviderConfig>) -> Vec<VariantDescription> {
    if data.is_empty() {
        return Vec::new();
    }

    // One group per (provider, key): all the candidate assignments for it.
    let groups: Vec<Vec<VariantMeta>> = data
        .into_iter()
        .flat_map(|provider_config| {
            provider_config.configs.into_iter().map(move |key_config| {
                key_config
                    .values
                    .into_iter()
                    .map(|value| VariantMeta {
                        provider: provider_config.provider.clone(),
                        key: key_config.key.clone(),
                        value,
                    })
                    .collect()
            })
        })
        .collect();

    // Larger subsets first: prefer descriptions that pin down more keys.
    let mut out = Vec::new();
    for size in (1..=groups.len()).rev() {
        for subset in groups.iter().combinations(size) {
            for picked in subset.iter().copied().multi_cartesian_product() {
                let mut data: Vec<VariantMeta> = picked.into_iter().cloned().collect();
                data.sort_by(|a, b| a.provider.cmp(&b.provider).then_with(|| a.key.cmp(&b.key)));
                out.push(VariantDescription { data });
            }
        }
    }
    out
}

View file

@ -55,6 +55,7 @@ uv-tool = { workspace = true }
uv-torch = { workspace = true }
uv-trampoline-builder = { workspace = true }
uv-types = { workspace = true }
uv-variants = { workspace = true }
uv-version = { workspace = true }
uv-virtualenv = { workspace = true }
uv-warnings = { workspace = true }

View file

@ -557,7 +557,7 @@ async fn build_package(
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, None, &hasher, build_options)
FlatIndex::from_entries(entries, None, None, &hasher, build_options)
};
// Initialize any shared state.

View file

@ -436,13 +436,16 @@ pub(crate) async fn pip_compile(
// Combine the `--no-binary` and `--no-build` flags from the requirements files.
let build_options = build_options.combine(no_binary, no_build);
// STOPSHIP(charlie): Resolve all the variants, with caching.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), &cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, tags.as_deref(), &hasher, &build_options)
FlatIndex::from_entries(entries, tags.as_deref(), variants, &hasher, &build_options)
};
// Determine whether to enable build isolation.
@ -514,6 +517,7 @@ pub(crate) async fn pip_compile(
&Reinstall::None,
&upgrade,
tags.as_deref(),
variants,
resolver_env.clone(),
python_requirement,
interpreter.markers(),

View file

@ -10,8 +10,9 @@ use tracing::{Level, debug, enabled};
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, Constraints, DryRun, ExtrasSpecification,
HashCheckingMode, IndexStrategy, PreviewMode, Reinstall, SourceStrategy, Upgrade,
BuildOptions, BuildOutput, Concurrency, ConfigSettings, Constraints, DryRun,
ExtrasSpecification, HashCheckingMode, IndexStrategy, PreviewMode, Reinstall, SourceStrategy,
Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
use uv_dispatch::{BuildDispatch, SharedState};
@ -24,7 +25,7 @@ use uv_install_wheel::LinkMode;
use uv_installer::{SatisfiesResult, SitePackages};
use uv_normalize::GroupName;
use uv_pep508::PackageName;
use uv_pypi_types::Conflicts;
use uv_pypi_types::{Conflicts, VariantProviderBackend};
use uv_python::{
EnvironmentPreference, Prefix, PythonEnvironment, PythonInstallation, PythonPreference,
PythonRequest, PythonVersion, Target,
@ -36,6 +37,7 @@ use uv_resolver::{
};
use uv_torch::{TorchMode, TorchStrategy};
use uv_types::{BuildIsolation, HashStrategy};
use uv_variants::{get_combinations, VariantSet};
use uv_warnings::warn_user;
use uv_workspace::WorkspaceCache;
@ -57,6 +59,7 @@ pub(crate) async fn pip_install(
constraints_from_workspace: Vec<Requirement>,
overrides_from_workspace: Vec<Requirement>,
build_constraints_from_workspace: Vec<Requirement>,
variants: Vec<VariantProviderBackend>,
extras: &ExtrasSpecification,
groups: BTreeMap<PathBuf, Vec<GroupName>>,
resolution_mode: ResolutionMode,
@ -368,7 +371,7 @@ pub(crate) async fn pip_install(
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(&tags), &hasher, &build_options)
FlatIndex::from_entries(entries, Some(&tags), None, &hasher, &build_options)
};
// Determine whether to enable build isolation.
@ -443,6 +446,25 @@ pub(crate) async fn pip_install(
// When resolving, don't take any external preferences into account.
let preferences = Vec::default();
// Compute the set of available variants.
let variants = {
// Run all providers.
let mut configs = vec![];
for provider in variants {
let builder = build_dispatch
.setup_variants(provider, BuildOutput::Debug)
.await?;
let config = builder.build().await?;
configs.push(config);
}
// Compute all combinations of the variants.
let combinations = get_combinations(configs);
VariantSet::new(&combinations)?
};
let options = OptionsBuilder::new()
.resolution_mode(resolution_mode)
.prerelease_mode(prerelease_mode)
@ -469,6 +491,7 @@ pub(crate) async fn pip_install(
&reinstall,
&upgrade,
Some(&tags),
Some(&variants),
ResolverEnvironment::specific(marker_env.clone()),
python_requirement,
interpreter.markers(),

View file

@ -42,6 +42,7 @@ use uv_resolver::{
};
use uv_tool::InstalledTools;
use uv_types::{BuildContext, HashStrategy, InFlight, InstalledPackagesProvider};
use uv_variants::VariantSet;
use uv_warnings::warn_user;
use crate::commands::pip::loggers::{DefaultInstallLogger, InstallLogger, ResolveLogger};
@ -118,6 +119,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
reinstall: &Reinstall,
upgrade: &Upgrade,
tags: Option<&Tags>,
variants: Option<&VariantSet>,
resolver_env: ResolverEnvironment,
python_requirement: PythonRequirement,
current_environment: &MarkerEnvironment,
@ -348,6 +350,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
current_environment,
conflicts,
tags,
variants,
flat_index,
index,
hasher,

View file

@ -295,13 +295,16 @@ pub(crate) async fn pip_sync(
// Combine the `--no-binary` and `--no-build` flags from the requirements files.
let build_options = build_options.combine(no_binary, no_build);
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), &cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(&tags), &hasher, &build_options)
FlatIndex::from_entries(entries, Some(&tags), variants, &hasher, &build_options)
};
// Determine whether to enable build isolation.
@ -404,6 +407,7 @@ pub(crate) async fn pip_sync(
&reinstall,
&upgrade,
Some(&tags),
variants,
ResolverEnvironment::specific(marker_env.clone()),
python_requirement,
interpreter.markers(),

View file

@ -412,7 +412,13 @@ pub(crate) async fn add(
.map(Index::url),
)
.await?;
FlatIndex::from_entries(entries, None, &hasher, &settings.resolver.build_options)
FlatIndex::from_entries(
entries,
None,
None,
&hasher,
&settings.resolver.build_options,
)
};
// Create a build dispatch.

View file

@ -649,7 +649,7 @@ async fn do_lock(
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, None, &hasher, build_options)
FlatIndex::from_entries(entries, None, None, &hasher, build_options)
};
// Create a build dispatch.
@ -822,6 +822,7 @@ async fn do_lock(
&Reinstall::default(),
upgrade,
None,
None,
resolver_env,
python_requirement,
interpreter.markers(),

View file

@ -1874,13 +1874,16 @@ pub(crate) async fn resolve_environment(
None => vec![],
};
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
FlatIndex::from_entries(entries, Some(tags), None, &hasher, build_options)
};
let workspace_cache = WorkspaceCache::default();
@ -1924,6 +1927,7 @@ pub(crate) async fn resolve_environment(
&reinstall,
&upgrade,
Some(tags),
variants,
ResolverEnvironment::specific(marker_env),
python_requirement,
interpreter.markers(),
@ -2011,13 +2015,16 @@ pub(crate) async fn sync_environment(
let hasher = HashStrategy::default();
let workspace_cache = WorkspaceCache::default();
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
FlatIndex::from_entries(entries, Some(tags), variants, &hasher, build_options)
};
// Create a build dispatch.
@ -2231,13 +2238,16 @@ pub(crate) async fn update_environment(
let tags = venv.interpreter().tags()?;
let python_requirement = PythonRequirement::from_interpreter(interpreter);
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
FlatIndex::from_entries(entries, Some(tags), variants, &hasher, build_options)
};
// Create a build dispatch.
@ -2263,6 +2273,9 @@ pub(crate) async fn update_environment(
preview,
);
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the requirements.
let resolution = match pip::operations::resolve(
requirements,
@ -2279,6 +2292,7 @@ pub(crate) async fn update_environment(
reinstall,
upgrade,
Some(tags),
variants,
ResolverEnvironment::specific(marker_env.clone()),
python_requirement,
venv.interpreter().markers(),

View file

@ -715,13 +715,16 @@ pub(super) async fn do_sync(
// Extract the hashes from the lockfile.
let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?;
// TODO(charlie): Compute available variants.
let variants = None;
// Resolve the flat indexes from `--find-links`.
let flat_index = {
let client = FlatIndexClient::new(client.cached_client(), client.connectivity(), cache);
let entries = client
.fetch_all(index_locations.flat_indexes().map(Index::url))
.await?;
FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
FlatIndex::from_entries(entries, Some(tags), variants, &hasher, build_options)
};
// Create a build dispatch.

View file

@ -326,6 +326,7 @@ async fn venv_impl(
FlatIndex::from_entries(
entries,
Some(tags),
None,
&HashStrategy::None,
&BuildOptions::new(NoBinary::None, NoBuild::All),
)

View file

@ -721,6 +721,7 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
args.constraints_from_workspace,
args.overrides_from_workspace,
args.build_constraints_from_workspace,
args.variants_from_workspace,
&args.settings.extras,
groups,
args.settings.resolution,

View file

@ -31,7 +31,7 @@ use uv_distribution_types::{DependencyMetadata, Index, IndexLocations, IndexUrl,
use uv_install_wheel::LinkMode;
use uv_normalize::{PackageName, PipGroupName};
use uv_pep508::{ExtraName, MarkerTree, RequirementOrigin};
use uv_pypi_types::SupportedEnvironments;
use uv_pypi_types::{SupportedEnvironments, VariantProviderBackend};
use uv_python::{Prefix, PythonDownloads, PythonPreference, PythonVersion, Target};
use uv_redacted::DisplaySafeUrl;
use uv_resolver::{
@ -2110,6 +2110,7 @@ pub(crate) struct PipInstallSettings {
pub(crate) overrides: Vec<PathBuf>,
pub(crate) build_constraints: Vec<PathBuf>,
pub(crate) dry_run: DryRun,
pub(crate) variants_from_workspace: Vec<VariantProviderBackend>,
pub(crate) constraints_from_workspace: Vec<Requirement>,
pub(crate) overrides_from_workspace: Vec<Requirement>,
pub(crate) build_constraints_from_workspace: Vec<Requirement>,
@ -2162,6 +2163,12 @@ impl PipInstallSettings {
compat_args: _,
} = args;
let variants_from_workspace = if let Some(configuration) = &filesystem {
configuration.variant.clone().unwrap_or_default()
} else {
Vec::new()
};
let constraints_from_workspace = if let Some(configuration) = &filesystem {
configuration
.constraint_dependencies
@ -2221,6 +2228,7 @@ impl PipInstallSettings {
.filter_map(Maybe::into_option)
.collect(),
dry_run: DryRun::from_args(dry_run),
variants_from_workspace,
constraints_from_workspace,
overrides_from_workspace,
build_constraints_from_workspace,

View file

@ -98,3 +98,7 @@ files = [
[tool.uv]
managed = false
[[tool.uv.variant]]
backend = "provider_fictional_hw.plugin:build"
requires = ["provider_fictional_hw"]