Reject already-installed wheels that don't match the target platform (#15484)

## Summary

We've received several requests to validate that installed wheels match
the current Python platform. This isn't _super_ common, since it
requires that your platform changes in some meaningful way (e.g., you
switch from x86 to ARM), though in practice, it sounds like it _can_
happen in HPC environments. This seems like a good thing to do
regardless, so we now validate that the tags (as recorded in `WHEEL`) are
consistent with the current platform during installs.

Closes https://github.com/astral-sh/uv/issues/15035.
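
As a rough illustration of the new behavior (a sketch only, using the names introduced in the diff below; the `Tags` describing the current interpreter are assumed to come from elsewhere, e.g., `Interpreter::tags()` as in the call sites further down), the check amounts to parsing the `Tag:` entries recorded in an installed wheel's `.dist-info/WHEEL` file and testing them against the current platform:

```rust
use uv_distribution_filename::ExpandedTags;
use uv_platform_tags::Tags;

/// Sketch: decide whether an installed wheel is still usable on the current
/// platform, given the `Tag:` values read from its `.dist-info/WHEEL` file.
fn wheel_matches_platform(wheel_tags: &[String], current: &Tags) -> bool {
    match ExpandedTags::parse(wheel_tags.iter().map(String::as_str)) {
        // Compatible if any expanded tag matches the current interpreter and platform.
        Ok(tags) => tags.is_compatible(current),
        // Unparsable tags are handled per call site in the real code (e.g., a
        // `TagsUnavailable` diagnostic in `uv pip check`); treat them as a mismatch here.
        Err(_) => false,
    }
}
```

During installs, a tag mismatch is treated as `RequirementSatisfaction::Mismatch`, which invalidates the installed package and triggers a reinstall; `uv pip check` reports it as an incompatibility instead.
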
Charlie Marsh authored on 2025-08-25 09:20:54 -04:00; committed by GitHub
parent 563adb8904
commit be4d5b72aa
25 changed files with 751 additions and 63 deletions

View file

@ -0,0 +1,487 @@
use std::str::FromStr;
use memchr::memchr;
use thiserror::Error;
use uv_platform_tags::{
AbiTag, LanguageTag, ParseAbiTagError, ParseLanguageTagError, ParsePlatformTagError,
PlatformTag, Tags,
};
use crate::splitter::MemchrSplitter;
use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall};
/// The expanded wheel tags as stored in a `WHEEL` file.
///
/// For example, if a wheel filename included `py2.py3-none-any`, the `WHEEL` file would include:
/// ```text
/// Tag: py2-none-any
/// Tag: py3-none-any
/// ```
///
/// This type stores those expanded tags.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct ExpandedTags(smallvec::SmallVec<[WheelTag; 1]>);
impl ExpandedTags {
/// Parse a list of expanded wheel tags (e.g., `py3-none-any`).
pub fn parse<'a>(tags: impl IntoIterator<Item = &'a str>) -> Result<Self, ExpandedTagError> {
let tags = tags
.into_iter()
.map(parse_expanded_tag)
.collect::<Result<_, _>>()?;
Ok(Self(tags))
}
/// Returns `true` if the wheel is compatible with the given tags.
pub fn is_compatible(&self, compatible_tags: &Tags) -> bool {
self.0.iter().any(|tag| {
compatible_tags.is_compatible(tag.python_tags(), tag.abi_tags(), tag.platform_tags())
})
}
}
#[derive(Error, Debug)]
pub enum ExpandedTagError {
#[error("The wheel tag \"{0}\" is missing a language tag")]
MissingLanguageTag(String),
#[error("The wheel tag \"{0}\" is missing an ABI tag")]
MissingAbiTag(String),
#[error("The wheel tag \"{0}\" is missing a platform tag")]
MissingPlatformTag(String),
#[error("The wheel tag \"{0}\" contains too many segments")]
ExtraSegment(String),
#[error("The wheel tag \"{0}\" contains an invalid language tag")]
InvalidLanguageTag(String, #[source] ParseLanguageTagError),
#[error("The wheel tag \"{0}\" contains an invalid ABI tag")]
InvalidAbiTag(String, #[source] ParseAbiTagError),
#[error("The wheel tag \"{0}\" contains an invalid platform tag")]
InvalidPlatformTag(String, #[source] ParsePlatformTagError),
}
/// Parse an expanded (i.e., simplified) wheel tag, e.g. `py3-none-any`.
///
/// Unlike the tags in a wheel filename, each expanded tag is expected to contain exactly three
/// segments separated by `-` (a language tag, an ABI tag, and a platform tag), with a single value
/// per segment; however, empirically, some build backends emit compound, dot-separated segments
/// (like `cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64`), so we allow those too.
fn parse_expanded_tag(tag: &str) -> Result<WheelTag, ExpandedTagError> {
let mut splitter = memchr::Memchr::new(b'-', tag.as_bytes());
if tag.is_empty() {
return Err(ExpandedTagError::MissingLanguageTag(tag.to_string()));
}
let Some(python_tag_index) = splitter.next() else {
return Err(ExpandedTagError::MissingAbiTag(tag.to_string()));
};
let Some(abi_tag_index) = splitter.next() else {
return Err(ExpandedTagError::MissingPlatformTag(tag.to_string()));
};
if splitter.next().is_some() {
return Err(ExpandedTagError::ExtraSegment(tag.to_string()));
}
let python_tag = &tag[..python_tag_index];
let abi_tag = &tag[python_tag_index + 1..abi_tag_index];
let platform_tag = &tag[abi_tag_index + 1..];
let is_small = memchr(b'.', tag.as_bytes()).is_none();
if let Some(small) = is_small
.then(|| {
Some(WheelTagSmall {
python_tag: LanguageTag::from_str(python_tag).ok()?,
abi_tag: AbiTag::from_str(abi_tag).ok()?,
platform_tag: PlatformTag::from_str(platform_tag).ok()?,
})
})
.flatten()
{
Ok(WheelTag::Small { small })
} else {
Ok(WheelTag::Large {
large: Box::new(WheelTagLarge {
build_tag: None,
python_tag: MemchrSplitter::split(python_tag, b'.')
.map(LanguageTag::from_str)
.filter_map(Result::ok)
.collect(),
abi_tag: MemchrSplitter::split(abi_tag, b'.')
.map(AbiTag::from_str)
.filter_map(Result::ok)
.collect(),
platform_tag: MemchrSplitter::split(platform_tag, b'.')
.map(PlatformTag::from_str)
.filter_map(Result::ok)
.collect(),
repr: tag.into(),
}),
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_simple_expanded_tag() {
let tags = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r"
ExpandedTags(
[
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
],
)
");
}
#[test]
fn test_parse_multiple_expanded_tags() {
let tags = ExpandedTags::parse(vec![
"py2-none-any",
"py3-none-any",
"cp39-cp39-linux_x86_64",
])
.unwrap();
insta::assert_debug_snapshot!(tags, @r"
ExpandedTags(
[
Small {
small: WheelTagSmall {
python_tag: Python {
major: 2,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
Small {
small: WheelTagSmall {
python_tag: CPython {
python_version: (
3,
9,
),
},
abi_tag: CPython {
gil_disabled: false,
python_version: (
3,
9,
),
},
platform_tag: Linux {
arch: X86_64,
},
},
},
],
)
");
}
#[test]
fn test_parse_complex_platform_tag() {
let tags = ExpandedTags::parse(vec![
"cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64",
])
.unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
CPython {
python_version: (
3,
12,
),
},
],
abi_tag: [
CPython {
gil_disabled: false,
python_version: (
3,
12,
),
},
],
platform_tag: [
Manylinux {
major: 2,
minor: 17,
arch: X86_64,
},
Manylinux2014 {
arch: X86_64,
},
],
repr: "cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64",
},
},
],
)
"#);
}
#[test]
fn test_parse_unknown_expanded_tag() {
let tags = ExpandedTags::parse(vec!["py3-foo-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
Python {
major: 3,
minor: None,
},
],
abi_tag: [],
platform_tag: [
Any,
],
repr: "py3-foo-any",
},
},
],
)
"#);
}
#[test]
fn test_parse_expanded_tag_with_dots() {
let tags = ExpandedTags::parse(vec!["py2.py3-none-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
Python {
major: 2,
minor: None,
},
Python {
major: 3,
minor: None,
},
],
abi_tag: [
None,
],
platform_tag: [
Any,
],
repr: "py2.py3-none-any",
},
},
],
)
"#);
}
#[test]
fn test_error_missing_language_tag() {
let err = ExpandedTags::parse(vec![""]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingLanguageTag(
"",
)
"#);
}
#[test]
fn test_error_missing_abi_tag() {
let err = ExpandedTags::parse(vec!["py3"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingAbiTag(
"py3",
)
"#);
}
#[test]
fn test_error_missing_platform_tag() {
let err = ExpandedTags::parse(vec!["py3-none"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingPlatformTag(
"py3-none",
)
"#);
}
#[test]
fn test_error_extra_segment() {
let err = ExpandedTags::parse(vec!["py3-none-any-extra"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
ExtraSegment(
"py3-none-any-extra",
)
"#);
}
#[test]
fn test_parse_expanded_tag_single_segment() {
let result = parse_expanded_tag("py3-none-any");
assert!(result.is_ok());
let tag = result.unwrap();
insta::assert_debug_snapshot!(tag, @r"
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
}
");
}
#[test]
fn test_parse_expanded_tag_multi_segment() {
let result = parse_expanded_tag("cp39.cp310-cp39.cp310-linux_x86_64.linux_i686");
assert!(result.is_ok());
let tag = result.unwrap();
insta::assert_debug_snapshot!(tag, @r#"
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
CPython {
python_version: (
3,
9,
),
},
CPython {
python_version: (
3,
10,
),
},
],
abi_tag: [
CPython {
gil_disabled: false,
python_version: (
3,
9,
),
},
CPython {
gil_disabled: false,
python_version: (
3,
10,
),
},
],
platform_tag: [
Linux {
arch: X86_64,
},
Linux {
arch: X86,
},
],
repr: "cp39.cp310-cp39.cp310-linux_x86_64.linux_i686",
},
}
"#);
}
#[test]
fn test_parse_expanded_tag_empty() {
let result = parse_expanded_tag("");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingLanguageTag(
"",
)
"#);
}
#[test]
fn test_parse_expanded_tag_one_segment() {
let result = parse_expanded_tag("python");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingAbiTag(
"python",
)
"#);
}
#[test]
fn test_parse_expanded_tag_two_segments() {
let result = parse_expanded_tag("py3-none");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingPlatformTag(
"py3-none",
)
"#);
}
#[test]
fn test_parse_expanded_tag_four_segments() {
let result = parse_expanded_tag("py3-none-any-extra");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
ExtraSegment(
"py3-none-any-extra",
)
"#);
}
#[test]
fn test_expanded_tags_ordering() {
let tags1 = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
let tags2 = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
let tags3 = ExpandedTags::parse(vec!["py2-none-any"]).unwrap();
assert_eq!(tags1, tags2);
assert_ne!(tags1, tags3);
}
}

View file

@ -1,16 +1,19 @@
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use uv_normalize::PackageName;
use uv_pep440::Version;
pub use build_tag::{BuildTag, BuildTagError};
pub use egg::{EggInfoFilename, EggInfoFilenameError};
pub use expanded_tags::{ExpandedTagError, ExpandedTags};
pub use extension::{DistExtension, ExtensionError, SourceDistExtension};
pub use source_dist::{SourceDistFilename, SourceDistFilenameError};
pub use wheel::{WheelFilename, WheelFilenameError};
mod build_tag;
mod egg;
mod expanded_tags;
mod extension;
mod source_dist;
mod splitter;

View file

@ -25,6 +25,7 @@ uv-git-types = { workspace = true }
uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-install-wheel = { workspace = true }
uv-platform-tags = { workspace = true }
uv-pypi-types = { workspace = true }
uv-redacted = { workspace = true }

View file

@ -9,8 +9,9 @@ use tracing::warn;
use url::Url;
use uv_cache_info::CacheInfo;
use uv_distribution_filename::EggInfoFilename;
use uv_distribution_filename::{EggInfoFilename, ExpandedTags};
use uv_fs::Simplified;
use uv_install_wheel::WheelFile;
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_pypi_types::{DirectUrl, MetadataError};
@ -40,6 +41,12 @@ pub enum InstalledDistError {
#[error(transparent)]
PackageNameParse(#[from] uv_normalize::InvalidNameError),
#[error(transparent)]
WheelFileParse(#[from] uv_install_wheel::Error),
#[error(transparent)]
ExpandedTagParse(#[from] uv_distribution_filename::ExpandedTagError),
#[error("Invalid .egg-link path: `{}`", _0.user_display())]
InvalidEggLinkPath(PathBuf),
@ -414,6 +421,30 @@ impl InstalledDist {
}
}
/// Return the supported wheel tags for the distribution from the `WHEEL` file, if available.
pub fn read_tags(&self) -> Result<Option<ExpandedTags>, InstalledDistError> {
// TODO(charlie): Cache this result.
let path = match self {
Self::Registry(InstalledRegistryDist { path, .. }) => path,
Self::Url(InstalledDirectUrlDist { path, .. }) => path,
Self::EggInfoFile(_) => return Ok(None),
Self::EggInfoDirectory(_) => return Ok(None),
Self::LegacyEditable(_) => return Ok(None),
};
// Read the `WHEEL` file.
let contents = fs_err::read_to_string(path.join("WHEEL"))?;
let wheel_file = WheelFile::parse(&contents)?;
let Some(tags) = wheel_file.tags() else {
return Ok(None);
};
// Parse the tags.
let tags = ExpandedTags::parse(tags.iter().map(String::as_str))?;
Ok(Some(tags))
}
/// Return true if the distribution is editable.
pub fn is_editable(&self) -> bool {
matches!(

View file

@ -1,20 +1,23 @@
use crate::Error;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;
use fs_err as fs;
use fs_err::DirEntry;
use reflink_copy as reflink;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;
use tempfile::tempdir_in;
use tracing::{debug, instrument, trace};
use walkdir::WalkDir;
use uv_distribution_filename::WheelFilename;
use uv_fs::Simplified;
use uv_preview::{Preview, PreviewFeatures};
use uv_warnings::{warn_user, warn_user_once};
use walkdir::WalkDir;
use crate::Error;
#[allow(clippy::struct_field_names)]
#[derive(Debug, Default)]

View file

@ -949,12 +949,9 @@ mod test {
use assert_fs::prelude::*;
use indoc::{formatdoc, indoc};
use crate::Error;
use crate::wheel::format_shebang;
use super::{
RecordEntry, Script, WheelFile, get_script_executable, parse_email_message_file,
read_record_file, write_installer_metadata,
Error, RecordEntry, Script, WheelFile, format_shebang, get_script_executable,
parse_email_message_file, read_record_file, write_installer_metadata,
};
#[test]

View file

@ -125,6 +125,7 @@ impl<'a> Planner<'a> {
dist.name(),
installed,
&source,
tags,
config_settings,
config_settings_package,
extra_build_requires,

View file

@ -14,6 +14,7 @@ use uv_distribution_types::{
};
use uv_git_types::GitOid;
use uv_normalize::PackageName;
use uv_platform_tags::Tags;
use uv_pypi_types::{DirInfo, DirectUrl, VcsInfo, VcsKind};
#[derive(Debug, Copy, Clone)]
@ -32,6 +33,7 @@ impl RequirementSatisfaction {
name: &PackageName,
distribution: &InstalledDist,
source: &RequirementSource,
tags: &Tags,
config_settings: &ConfigSettings,
config_settings_package: &PackageConfigSettings,
extra_build_requires: &ExtraBuildRequires,
@ -55,7 +57,7 @@ impl RequirementSatisfaction {
);
dist_build_info != &build_info
}) {
debug!("Build info mismatch for {name}: {distribution:?}");
debug!("Build info mismatch for {name}: {distribution}");
return Self::OutOfDate;
}
@ -63,10 +65,9 @@ impl RequirementSatisfaction {
match source {
// If the requirement comes from a registry, check by name.
RequirementSource::Registry { specifier, .. } => {
if specifier.contains(distribution.version()) {
return Self::Satisfied;
if !specifier.contains(distribution.version()) {
return Self::Mismatch;
}
Self::Mismatch
}
RequirementSource::Url {
// We use the location since `direct_url.json` also stores this URL, e.g.
@ -130,9 +131,6 @@ impl RequirementSatisfaction {
}
}
}
// Otherwise, assume the requirement is up-to-date.
Self::Satisfied
}
RequirementSource::Git {
url: _,
@ -188,8 +186,6 @@ impl RequirementSatisfaction {
);
return Self::OutOfDate;
}
Self::Satisfied
}
RequirementSource::Path {
install_path: requested_path,
@ -244,8 +240,6 @@ impl RequirementSatisfaction {
return Self::CacheInvalid;
}
}
Self::Satisfied
}
RequirementSource::Directory {
install_path: requested_path,
@ -314,9 +308,26 @@ impl RequirementSatisfaction {
}
}
Self::Satisfied
// If the distribution isn't compatible with the current platform, it is a mismatch.
if let Ok(Some(wheel_tags)) = distribution.read_tags() {
if !wheel_tags.is_compatible(tags) {
debug!("Platform tags mismatch for {name}: {distribution}");
return Self::Mismatch;
}
}
}
}
// If the distribution isn't compatible with the current platform, it is a mismatch.
if let Ok(Some(wheel_tags)) = distribution.read_tags() {
if !wheel_tags.is_compatible(tags) {
debug!("Platform tags mismatch for {name}: {distribution}");
return Self::Mismatch;
}
}
// Otherwise, assume the requirement is up-to-date.
Self::Satisfied
}
}

View file

@ -16,6 +16,7 @@ use uv_fs::Simplified;
use uv_normalize::PackageName;
use uv_pep440::{Version, VersionSpecifiers};
use uv_pep508::VersionOrUrl;
use uv_platform_tags::Tags;
use uv_pypi_types::{ResolverMarkerEnvironment, VerbatimParsedUrl};
use uv_python::{Interpreter, PythonEnvironment};
use uv_redacted::DisplaySafeUrl;
@ -194,6 +195,7 @@ impl SitePackages {
pub fn diagnostics(
&self,
markers: &ResolverMarkerEnvironment,
tags: &Tags,
) -> Result<Vec<SitePackagesDiagnostic>> {
let mut diagnostics = Vec::new();
@ -242,6 +244,25 @@ impl SitePackages {
}
}
// Verify that the package is compatible with the current tags.
match distribution.read_tags() {
Ok(Some(wheel_tags)) => {
if !wheel_tags.is_compatible(tags) {
// TODO(charlie): Show the expanded tag hint that explains _why_ it doesn't match.
diagnostics.push(SitePackagesDiagnostic::IncompatiblePlatform {
package: package.clone(),
});
}
}
Ok(None) => {}
Err(_) => {
diagnostics.push(SitePackagesDiagnostic::TagsUnavailable {
package: package.clone(),
path: distribution.install_path().to_owned(),
});
}
}
// Verify that the dependencies are installed.
for dependency in &metadata.requires_dist {
if !dependency.evaluate_markers(markers, &[]) {
@ -259,12 +280,10 @@ impl SitePackages {
}
[installed] => {
match &dependency.version_or_url {
None | Some(uv_pep508::VersionOrUrl::Url(_)) => {
None | Some(VersionOrUrl::Url(_)) => {
// Nothing to do (accept any installed version).
}
Some(uv_pep508::VersionOrUrl::VersionSpecifier(
version_specifier,
)) => {
Some(VersionOrUrl::VersionSpecifier(version_specifier)) => {
// The installed version doesn't satisfy the requirement.
if !version_specifier.contains(installed.version()) {
diagnostics.push(
@ -296,6 +315,7 @@ impl SitePackages {
constraints: &[NameRequirementSpecification],
overrides: &[UnresolvedRequirementSpecification],
markers: &ResolverMarkerEnvironment,
tags: &Tags,
config_settings: &ConfigSettings,
config_settings_package: &PackageConfigSettings,
extra_build_requires: &ExtraBuildRequires,
@ -385,6 +405,7 @@ impl SitePackages {
constraints.iter().map(|constraint| &constraint.requirement),
overrides.iter().map(Cow::as_ref),
markers,
tags,
config_settings,
config_settings_package,
extra_build_requires,
@ -399,6 +420,7 @@ impl SitePackages {
constraints: impl Iterator<Item = &'a Requirement>,
overrides: impl Iterator<Item = &'a Requirement>,
markers: &ResolverMarkerEnvironment,
tags: &Tags,
config_settings: &ConfigSettings,
config_settings_package: &PackageConfigSettings,
extra_build_requires: &ExtraBuildRequires,
@ -460,6 +482,7 @@ impl SitePackages {
name,
distribution,
&requirement.source,
tags,
config_settings,
config_settings_package,
extra_build_requires,
@ -481,6 +504,7 @@ impl SitePackages {
name,
distribution,
&constraint.source,
tags,
config_settings,
config_settings_package,
extra_build_requires,
@ -566,6 +590,12 @@ pub enum SitePackagesDiagnostic {
/// The path to the package.
path: PathBuf,
},
TagsUnavailable {
/// The package that is missing tags.
package: PackageName,
/// The path to the package.
path: PathBuf,
},
IncompatiblePythonVersion {
/// The package that requires a different version of Python.
package: PackageName,
@ -574,6 +604,10 @@ pub enum SitePackagesDiagnostic {
/// The version of Python that is required.
requires_python: VersionSpecifiers,
},
IncompatiblePlatform {
/// The package that was built for a different platform.
package: PackageName,
},
MissingDependency {
/// The package that is missing a dependency.
package: PackageName,
@ -604,6 +638,10 @@ impl Diagnostic for SitePackagesDiagnostic {
"The package `{package}` is broken or incomplete (unable to read `METADATA`). Consider recreating the virtualenv, or removing the package directory at: {}.",
path.display(),
),
Self::TagsUnavailable { package, path } => format!(
"The package `{package}` is broken or incomplete (unable to read `WHEEL` file). Consider recreating the virtualenv, or removing the package directory at: {}.",
path.display(),
),
Self::IncompatiblePythonVersion {
package,
version,
@ -611,6 +649,9 @@ impl Diagnostic for SitePackagesDiagnostic {
} => format!(
"The package `{package}` requires Python {requires_python}, but `{version}` is installed"
),
Self::IncompatiblePlatform { package } => {
format!("The package `{package}` was built for a different platform")
}
Self::MissingDependency {
package,
requirement,
@ -640,7 +681,9 @@ impl Diagnostic for SitePackagesDiagnostic {
fn includes(&self, name: &PackageName) -> bool {
match self {
Self::MetadataUnavailable { package, .. } => name == package,
Self::TagsUnavailable { package, .. } => name == package,
Self::IncompatiblePythonVersion { package, .. } => name == package,
Self::IncompatiblePlatform { package } => name == package,
Self::MissingDependency { package, .. } => name == package,
Self::IncompatibleDependency {
package,

View file

@ -11,6 +11,7 @@ use uv_distribution_types::{CompatibleDist, IncompatibleDist, IncompatibleSource
use uv_distribution_types::{DistributionMetadata, IncompatibleWheel, Name, PrioritizedDist};
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_platform_tags::Tags;
use uv_types::InstalledPackagesProvider;
use crate::preferences::{Entry, PreferenceSource, Preferences};
@ -84,6 +85,7 @@ impl CandidateSelector {
exclusions: &'a Exclusions,
index: Option<&'a IndexUrl>,
env: &ResolverEnvironment,
tags: Option<&'a Tags>,
) -> Option<Candidate<'a>> {
let reinstall = exclusions.reinstall(package_name);
let upgrade = exclusions.upgrade(package_name);
@ -106,6 +108,7 @@ impl CandidateSelector {
reinstall,
index,
env,
tags,
) {
trace!("Using preference {} {}", preferred.name, preferred.version);
return Some(preferred);
@ -116,7 +119,7 @@ impl CandidateSelector {
let installed = if reinstall {
None
} else {
Self::get_installed(package_name, range, installed_packages)
Self::get_installed(package_name, range, installed_packages, tags)
};
// If we're not upgrading, we should prefer the already-installed distribution.
@ -176,6 +179,7 @@ impl CandidateSelector {
reinstall: bool,
index: Option<&'a IndexUrl>,
env: &ResolverEnvironment,
tags: Option<&'a Tags>,
) -> Option<Candidate<'a>> {
let preferences = preferences.get(package_name);
@ -231,6 +235,7 @@ impl CandidateSelector {
installed_packages,
reinstall,
env,
tags,
)
}
@ -244,6 +249,7 @@ impl CandidateSelector {
installed_packages: &'a InstalledPackages,
reinstall: bool,
env: &ResolverEnvironment,
tags: Option<&Tags>,
) -> Option<Candidate<'a>> {
for (version, source) in preferences {
// Respect the version range for this requirement.
@ -263,6 +269,17 @@ impl CandidateSelector {
"Found installed version of {dist} that satisfies preference in {range}"
);
// Verify that the installed distribution is compatible with the environment.
if tags.is_some_and(|tags| {
let Ok(Some(wheel_tags)) = dist.read_tags() else {
return false;
};
!wheel_tags.is_compatible(tags)
}) {
debug!("Platform tags mismatch for installed {dist}");
continue;
}
return Some(Candidate {
name: package_name,
version,
@ -351,6 +368,7 @@ impl CandidateSelector {
package_name: &'a PackageName,
range: &Range<Version>,
installed_packages: &'a InstalledPackages,
tags: Option<&'a Tags>,
) -> Option<Candidate<'a>> {
let installed_dists = installed_packages.get_packages(package_name);
match installed_dists.as_slice() {
@ -363,7 +381,17 @@ impl CandidateSelector {
return None;
}
debug!("Found installed version of {dist} that satisfies {range}");
// Verify that the installed distribution is compatible with the environment.
if tags.is_some_and(|tags| {
let Ok(Some(wheel_tags)) = dist.read_tags() else {
return false;
};
!wheel_tags.is_compatible(tags)
}) {
debug!("Platform tags mismatch for installed {dist}");
return None;
}
return Some(Candidate {
name: package_name,
version,

View file

@ -5270,18 +5270,14 @@ impl WheelTagHint {
fn python_tags<'a>(
filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
) -> impl Iterator<Item = LanguageTag> + 'a {
filenames
.flat_map(uv_distribution_filename::WheelFilename::python_tags)
.copied()
filenames.flat_map(WheelFilename::python_tags).copied()
}
/// Returns an iterator over the compatible Python tags of the available wheels.
fn abi_tags<'a>(
filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
) -> impl Iterator<Item = AbiTag> + 'a {
filenames
.flat_map(uv_distribution_filename::WheelFilename::abi_tags)
.copied()
filenames.flat_map(WheelFilename::abi_tags).copied()
}
/// Returns the set of platform tags for the distribution that are ABI-compatible with the given

View file

@ -1269,6 +1269,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
&self.exclusions,
index,
env,
self.tags.as_ref(),
) else {
// Short circuit: we couldn't find _any_ versions for a package.
return Ok(None);
@ -1485,6 +1486,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
&self.exclusions,
index,
env,
self.tags.as_ref(),
) else {
return Ok(None);
};
@ -2447,6 +2449,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
&self.exclusions,
None,
&env,
self.tags.as_ref(),
) else {
return Ok(None);
};

View file

@ -14,7 +14,7 @@ use uv_python::{
};
use crate::commands::pip::operations::report_target_environment;
use crate::commands::pip::resolution_markers;
use crate::commands::pip::{resolution_markers, resolution_tags};
use crate::commands::{ExitStatus, elapsed};
use crate::printer::Printer;
@ -57,12 +57,15 @@ pub(crate) fn pip_check(
.dimmed()
)?;
// Determine the markers to use for resolution.
// Determine the markers and tags to use for resolution.
let markers = resolution_markers(python_version, python_platform, environment.interpreter());
let tags = resolution_tags(python_version, python_platform, environment.interpreter())?;
// Run the diagnostics.
let diagnostics: Vec<SitePackagesDiagnostic> =
site_packages.diagnostics(&markers)?.into_iter().collect();
let diagnostics: Vec<SitePackagesDiagnostic> = site_packages
.diagnostics(&markers, &tags)?
.into_iter()
.collect();
if diagnostics.is_empty() {
writeln!(

View file

@ -87,11 +87,12 @@ pub(crate) fn pip_freeze(
// Validate that the environment is consistent.
if strict {
// Determine the markers to use for resolution.
// Determine the markers and tags to use for resolution.
let markers = environment.interpreter().resolver_marker_environment();
let tags = environment.interpreter().tags()?;
for entry in site_packages {
for diagnostic in entry.diagnostics(&markers)? {
for diagnostic in entry.diagnostics(&markers, tags)? {
writeln!(
printer.stderr(),
"{}{} {}",

View file

@ -265,13 +265,18 @@ pub(crate) async fn pip_install(
})
.ok();
// Determine the markers to use for the resolution.
// Determine the markers and tags to use for the resolution.
let interpreter = environment.interpreter();
let marker_env = resolution_markers(
python_version.as_ref(),
python_platform.as_ref(),
interpreter,
);
let tags = resolution_tags(
python_version.as_ref(),
python_platform.as_ref(),
interpreter,
)?;
// Determine the set of installed packages.
let site_packages = SitePackages::from_environment(&environment)?;
@ -291,6 +296,7 @@ pub(crate) async fn pip_install(
&constraints,
&overrides,
&marker_env,
&tags,
config_settings,
config_settings_package,
&extra_build_requires,
@ -329,13 +335,6 @@ pub(crate) async fn pip_install(
PythonRequirement::from_interpreter(interpreter)
};
// Determine the tags to use for the resolution.
let tags = resolution_tags(
python_version.as_ref(),
python_platform.as_ref(),
interpreter,
)?;
// Collect the set of required hashes.
let hasher = if let Some(hash_checking) = hash_checking {
HashStrategy::from_requirements(
@ -636,7 +635,7 @@ pub(crate) async fn pip_install(
// Notify the user of any environment diagnostics.
if strict && !dry_run.enabled() {
operations::diagnose_environment(&resolution, &environment, &marker_env, printer)?;
operations::diagnose_environment(&resolution, &environment, &marker_env, &tags, printer)?;
}
Ok(ExitStatus::Success)

View file

@ -272,10 +272,11 @@ pub(crate) async fn pip_list(
// Validate that the environment is consistent.
if strict {
// Determine the markers to use for resolution.
// Determine the markers and tags to use for resolution.
let markers = environment.interpreter().resolver_marker_environment();
let tags = environment.interpreter().tags()?;
for diagnostic in site_packages.diagnostics(&markers)? {
for diagnostic in site_packages.diagnostics(&markers, tags)? {
writeln!(
printer.stderr(),
"{}{} {}",

View file

@ -975,10 +975,11 @@ pub(crate) fn diagnose_environment(
resolution: &Resolution,
venv: &PythonEnvironment,
markers: &ResolverMarkerEnvironment,
tags: &Tags,
printer: Printer,
) -> Result<(), Error> {
let site_packages = SitePackages::from_environment(venv)?;
for diagnostic in site_packages.diagnostics(markers)? {
for diagnostic in site_packages.diagnostics(markers, tags)? {
// Only surface diagnostics that are "relevant" to the current resolution.
if resolution
.distributions()

View file

@ -57,8 +57,9 @@ pub(crate) fn pip_show(
// Build the installed index.
let site_packages = SitePackages::from_environment(&environment)?;
// Determine the markers to use for resolution.
// Determine the markers and tags to use for resolution.
let markers = environment.interpreter().resolver_marker_environment();
let tags = environment.interpreter().tags()?;
// Sort and deduplicate the packages, which are keyed by name.
packages.sort_unstable();
@ -201,7 +202,7 @@ pub(crate) fn pip_show(
// Validate that the environment is consistent.
if strict {
for diagnostic in site_packages.diagnostics(&markers)? {
for diagnostic in site_packages.diagnostics(&markers, tags)? {
writeln!(
printer.stderr(),
"{}{} {}",

View file

@ -566,7 +566,7 @@ pub(crate) async fn pip_sync(
// Notify the user of any environment diagnostics.
if strict && !dry_run.enabled() {
operations::diagnose_environment(&resolution, &environment, &marker_env, printer)?;
operations::diagnose_environment(&resolution, &environment, &marker_env, &tags, printer)?;
}
Ok(ExitStatus::Success)

View file

@ -80,8 +80,9 @@ pub(crate) async fn pip_tree(
packages
};
// Determine the markers to use for the resolution.
// Determine the markers and tags to use for the resolution.
let markers = environment.interpreter().resolver_marker_environment();
let tags = environment.interpreter().tags()?;
// Determine the latest version for each package.
let latest = if outdated && !packages.is_empty() {
@ -178,7 +179,7 @@ pub(crate) async fn pip_tree(
// Validate that the environment is consistent.
if strict {
for diagnostic in site_packages.diagnostics(&markers)? {
for diagnostic in site_packages.diagnostics(&markers, tags)? {
writeln!(
printer.stderr(),
"{}{} {}",

View file

@ -2266,9 +2266,10 @@ pub(crate) async fn update_environment(
..
} = spec;
// Determine markers to use for resolution.
// Determine markers and tags to use for resolution.
let interpreter = venv.interpreter();
let marker_env = venv.interpreter().resolver_marker_environment();
let tags = venv.interpreter().tags()?;
// Check if the current environment satisfies the requirements
let site_packages = SitePackages::from_environment(&venv)?;
@ -2282,6 +2283,7 @@ pub(crate) async fn update_environment(
&constraints,
&overrides,
&marker_env,
tags,
config_setting,
config_settings_package,
&extra_build_requires,

View file

@ -1354,6 +1354,12 @@ fn can_skip_ephemeral(
return false;
}
// Determine the markers and tags to use for resolution.
let markers = interpreter.resolver_marker_environment();
let Ok(tags) = interpreter.tags() else {
return false;
};
// Lower the extra build dependencies, if any.
let extra_build_requires =
LoweredExtraBuildDependencies::from_non_lowered(extra_build_dependencies.clone())
@ -1363,7 +1369,8 @@ fn can_skip_ephemeral(
&spec.requirements,
&spec.constraints,
&spec.overrides,
&interpreter.resolver_marker_environment(),
&markers,
tags,
config_setting,
config_settings_package,
&extra_build_requires,

View file

@ -965,6 +965,10 @@ async fn get_or_create_environment(
)
.into_inner();
// Determine the markers and tags to use for the resolution.
let markers = interpreter.resolver_marker_environment();
let tags = interpreter.tags()?;
// Check if the installed packages meet the requirements.
let site_packages = SitePackages::from_environment(&environment)?;
if matches!(
@ -972,7 +976,8 @@ async fn get_or_create_environment(
requirements.iter(),
constraints.iter(),
overrides.iter(),
&interpreter.resolver_marker_environment(),
&markers,
tags,
config_setting,
config_settings_package,
&extra_build_requires,

View file

@ -8745,7 +8745,7 @@ fn no_extension() {
/// Regression test for: <https://github.com/astral-sh/uv/pull/6646>
#[test]
fn switch_platform() -> Result<()> {
fn switch_python_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
@ -12718,3 +12718,65 @@ fn transitive_dependency_config_settings_invalidation() -> Result<()> {
Ok(())
}
#[test]
fn switch_platform() {
let context = TestContext::new("3.12");
uv_snapshot!(context.pip_install()
.arg("cffi")
.arg("--python-platform")
.arg("windows"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ cffi==1.16.0
+ pycparser==2.21
"
);
uv_snapshot!(context.pip_check().arg("--python-platform").arg("windows"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Checked 2 packages in [TIME]
All installed packages are compatible
"
);
uv_snapshot!(context.pip_check().arg("--python-platform").arg("linux"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Checked 2 packages in [TIME]
Found 1 incompatibility
The package `cffi` was built for a different platform
"
);
uv_snapshot!(context.pip_install()
.arg("cffi")
.arg("--python-platform")
.arg("linux"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ cffi==1.16.0
"
);
}