Mirror of https://github.com/astral-sh/uv.git (synced 2025-08-04 19:08:04 +00:00)
Move architecture and operating system probing to Python (#2381)
The architecture of uv does not necessarily match that of the Python
interpreter (#2326). In cross-compilation and testing scenarios, the operating
system can also mismatch. To solve this, we move arch and OS detection
to Python, vendoring the relevant pypa/packaging code, which prevents
mismatches between what the Python interpreter was compiled for and what
uv was compiled for.
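For context, the values that matter for wheel-tag selection are the ones the interpreter itself reports. A minimal illustration (standard library only, not uv's vendored code) of the kind of values being queried:

```python
import platform
import struct
import sys
import sysconfig

# Values reported by the *interpreter*, which is what wheel-tag selection
# needs; an x86_64 uv binary driving an arm64 Python (or vice versa) sees
# different values here than in its own compile-time constants.
print(platform.machine())        # e.g. "x86_64" or "arm64"
print(sys.platform)              # e.g. "linux", "darwin", "win32"
print(sysconfig.get_platform())  # e.g. "macosx-11.0-arm64"
print(struct.calcsize("P") * 8)  # pointer width of the interpreter, in bits
```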
To make the scripts more manageable, they are now a directory in a
tempdir and we run them with `python -m`. I've simplified the
pypa/packaging code since we're still building the tags in Rust. A
`Platform` is now instantiated by querying the Python interpreter for
its platform. The pypa/packaging files are copied verbatim for easier
updates, except for an `lru_cache()` backport for Python 3.7.
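A rough sketch of that layout, assuming a hypothetical `uv_probe` package name and payload (uv's actual vendored scripts differ): the package is written into a temporary directory and executed with `python -m`, so the vendored modules can import each other like a normal package.

```python
import json
import subprocess
import sys
import tempfile
from pathlib import Path

# Hypothetical module name and payload; uv's real vendored scripts differ.
FILES = {
    "uv_probe/__init__.py": "",
    "uv_probe/__main__.py": (
        "import json, platform, sys\n"
        "print(json.dumps({\n"
        "    'result': 'success',\n"
        "    'os': sys.platform,\n"
        "    'arch': platform.machine(),\n"
        "}))\n"
    ),
}

with tempfile.TemporaryDirectory() as tmp:
    for name, body in FILES.items():
        path = Path(tmp) / name
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(body)
    # Running with `-m` from the tempdir lets the vendored files import
    # each other as a package instead of being ad-hoc single scripts.
    proc = subprocess.run(
        [sys.executable, "-m", "uv_probe"],
        cwd=tmp, capture_output=True, text=True, check=True,
    )
    print(json.loads(proc.stdout))
```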
Error handling is done by a `"result": "success|error"` field that allows
passing error details to Rust:
```console
$ uv venv --no-cache
× Can't use Python at `/home/konsti/projects/uv/.venv/bin/python3`
╰─▶ Unknown operation system `linux`
```
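A minimal sketch of that convention on the Python side; apart from `result`, the field names here are assumptions rather than uv's exact schema. Failures become structured data instead of a traceback, so the Rust side can render them as in the message above.

```python
import json
import platform
import sys


def detect():
    """Stand-in for the vendored probing logic; an unknown OS would raise here."""
    if sys.platform not in ("linux", "darwin", "win32"):
        raise RuntimeError(f"Unknown operating system `{sys.platform}`")
    return {"os": sys.platform, "arch": platform.machine()}


def main():
    try:
        data = detect()
    except Exception as err:  # every failure becomes structured output
        json.dump(
            {"result": "error", "kind": type(err).__name__, "message": str(err)},
            sys.stdout,
        )
        return
    json.dump({"result": "success", **data}, sys.stdout)


if __name__ == "__main__":
    main()
```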
I've used the [maturin sysconfig collection](855f6d2cb1/sysconfig) as a
reference. I'm unsure how to test these changes across the wide variety
of platforms.
Fixes #2326
This commit is contained in:
parent e0ac5b4e84
commit 7964bfbb2b
50 changed files with 1603 additions and 1473 deletions
@@ -22,7 +22,7 @@ name = "install_wheel_rs"
[dependencies]
distribution-filename = { path = "../distribution-filename" }
pep440_rs = { path = "../pep440-rs" }
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
uv-normalize = { path = "../uv-normalize" }
uv-fs = { path = "../uv-fs" }
pypi-types = { path = "../pypi-types" }

@@ -36,7 +36,6 @@ mailparse = { workspace = true }
once_cell = { workspace = true }
pathdiff = { workspace = true }
platform-info = { workspace = true }
plist = { workspace = true }
reflink-copy = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
@@ -9,7 +9,7 @@ use thiserror::Error;
use zip::result::ZipError;

use pep440_rs::Version;
use platform_host::{Arch, Os};
use platform_tags::{Arch, Os};
use pypi_types::Scheme;
pub use uninstall::{uninstall_wheel, Uninstall};
use uv_fs::Simplified;
@@ -1,25 +0,0 @@
[package]
name = "platform-host"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lints]
workspace = true

[dependencies]
fs-err = { workspace = true }
goblin = { workspace = true }
once_cell = { workspace = true }
platform-info = { workspace = true }
plist = { workspace = true }
regex = { workspace = true }
serde = { workspace = true, features = ["derive"] }
target-lexicon = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
@@ -1,204 +0,0 @@
//! Abstractions for understanding the current platform (operating system and architecture).

use std::{fmt, io};

use platform_info::{PlatformInfo, PlatformInfoAPI, UNameAPI};
use thiserror::Error;

use crate::linux::detect_linux_libc;
use crate::mac_os::get_mac_os_version;

mod linux;
mod mac_os;

#[derive(Error, Debug)]
pub enum PlatformError {
    #[error(transparent)]
    IOError(#[from] io::Error),
    #[error("Failed to detect the operating system version: {0}")]
    OsVersionDetectionError(String),
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Platform {
    os: Os,
    arch: Arch,
}

impl Platform {
    /// Create a new platform from the given operating system and architecture.
    pub fn new(os: Os, arch: Arch) -> Self {
        Self { os, arch }
    }

    /// Create a new platform from the current operating system and architecture.
    pub fn current() -> Result<Self, PlatformError> {
        let os = Os::current()?;
        let arch = Arch::current()?;
        Ok(Self { os, arch })
    }

    /// Return the platform's operating system.
    pub fn os(&self) -> &Os {
        &self.os
    }

    /// Return the platform's architecture.
    pub fn arch(&self) -> Arch {
        self.arch
    }
}

/// All supported operating systems.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Os {
    Manylinux { major: u16, minor: u16 },
    Musllinux { major: u16, minor: u16 },
    Windows,
    Macos { major: u16, minor: u16 },
    FreeBsd { release: String },
    NetBsd { release: String },
    OpenBsd { release: String },
    Dragonfly { release: String },
    Illumos { release: String, arch: String },
    Haiku { release: String },
}

impl Os {
    pub fn current() -> Result<Self, PlatformError> {
        let target_triple = target_lexicon::HOST;

        let os = match target_triple.operating_system {
            target_lexicon::OperatingSystem::Linux => detect_linux_libc()?,
            target_lexicon::OperatingSystem::Windows => Self::Windows,
            target_lexicon::OperatingSystem::MacOSX { major, minor, .. } => {
                Self::Macos { major, minor }
            }
            target_lexicon::OperatingSystem::Darwin => {
                let (major, minor) = get_mac_os_version()?;
                Self::Macos { major, minor }
            }
            target_lexicon::OperatingSystem::Netbsd => Self::NetBsd {
                release: Self::platform_info()?
                    .release()
                    .to_string_lossy()
                    .to_string(),
            },
            target_lexicon::OperatingSystem::Freebsd => Self::FreeBsd {
                release: Self::platform_info()?
                    .release()
                    .to_string_lossy()
                    .to_string(),
            },
            target_lexicon::OperatingSystem::Openbsd => Self::OpenBsd {
                release: Self::platform_info()?
                    .release()
                    .to_string_lossy()
                    .to_string(),
            },
            target_lexicon::OperatingSystem::Dragonfly => Self::Dragonfly {
                release: Self::platform_info()?
                    .release()
                    .to_string_lossy()
                    .to_string(),
            },
            target_lexicon::OperatingSystem::Illumos => {
                let platform_info = Self::platform_info()?;
                Self::Illumos {
                    release: platform_info.release().to_string_lossy().to_string(),
                    arch: platform_info.machine().to_string_lossy().to_string(),
                }
            }
            target_lexicon::OperatingSystem::Haiku => Self::Haiku {
                release: Self::platform_info()?
                    .release()
                    .to_string_lossy()
                    .to_string(),
            },
            unsupported => {
                return Err(PlatformError::OsVersionDetectionError(format!(
                    "The operating system {unsupported:?} is not supported"
                )));
            }
        };
        Ok(os)
    }

    fn platform_info() -> Result<PlatformInfo, PlatformError> {
        PlatformInfo::new().map_err(|err| PlatformError::OsVersionDetectionError(err.to_string()))
    }
}

impl fmt::Display for Os {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::Manylinux { .. } => write!(f, "Manylinux"),
            Self::Musllinux { .. } => write!(f, "Musllinux"),
            Self::Windows => write!(f, "Windows"),
            Self::Macos { .. } => write!(f, "MacOS"),
            Self::FreeBsd { .. } => write!(f, "FreeBSD"),
            Self::NetBsd { .. } => write!(f, "NetBSD"),
            Self::OpenBsd { .. } => write!(f, "OpenBSD"),
            Self::Dragonfly { .. } => write!(f, "DragonFly"),
            Self::Illumos { .. } => write!(f, "Illumos"),
            Self::Haiku { .. } => write!(f, "Haiku"),
        }
    }
}

/// All supported CPU architectures
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum Arch {
    Aarch64,
    Armv7L,
    Powerpc64Le,
    Powerpc64,
    X86,
    X86_64,
    S390X,
}

impl fmt::Display for Arch {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::Aarch64 => write!(f, "aarch64"),
            Self::Armv7L => write!(f, "armv7l"),
            Self::Powerpc64Le => write!(f, "ppc64le"),
            Self::Powerpc64 => write!(f, "ppc64"),
            Self::X86 => write!(f, "i686"),
            Self::X86_64 => write!(f, "x86_64"),
            Self::S390X => write!(f, "s390x"),
        }
    }
}

impl Arch {
    pub fn current() -> Result<Self, PlatformError> {
        let target_triple = target_lexicon::HOST;
        let arch = match target_triple.architecture {
            target_lexicon::Architecture::X86_64 => Self::X86_64,
            target_lexicon::Architecture::X86_32(_) => Self::X86,
            target_lexicon::Architecture::Arm(_) => Self::Armv7L,
            target_lexicon::Architecture::Aarch64(_) => Self::Aarch64,
            target_lexicon::Architecture::Powerpc64 => Self::Powerpc64,
            target_lexicon::Architecture::Powerpc64le => Self::Powerpc64Le,
            target_lexicon::Architecture::S390x => Self::S390X,
            unsupported => {
                return Err(PlatformError::OsVersionDetectionError(format!(
                    "The architecture {unsupported} is not supported"
                )));
            }
        };
        Ok(arch)
    }

    /// Returns the oldest possible Manylinux tag for this architecture
    pub fn get_minimum_manylinux_minor(&self) -> u16 {
        match self {
            // manylinux 2014
            Self::Aarch64 | Self::Armv7L | Self::Powerpc64 | Self::Powerpc64Le | Self::S390X => 17,
            // manylinux 1
            Self::X86 | Self::X86_64 => 5,
        }
    }
}
@@ -1,293 +0,0 @@
//! Taken from `glibc_version` (<https://github.com/delta-incubator/glibc-version-rs>),
//! which used the Apache 2.0 license (but not the MIT license)

use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};

use fs_err as fs;
use goblin::elf::Elf;
use once_cell::sync::Lazy;
use regex::Regex;

use crate::{Os, PlatformError};

pub(crate) fn detect_linux_libc() -> Result<Os, PlatformError> {
    let ld_path = find_ld_path()?;

    tracing::trace!("trying to detect musl version by running `{ld_path:?}`");
    match detect_musl_version(&ld_path) {
        Ok(os) => return Ok(os),
        Err(err) => tracing::trace!("tried to find musl version, but failed: {err}"),
    }
    tracing::trace!("trying to detect libc version from possible symlink at {ld_path:?}");
    match detect_linux_libc_from_ld_symlink(&ld_path) {
        Ok(os) => return Ok(os),
        Err(err) => {
            tracing::trace!("tried to find libc version from ld symlink, but failed: {err}");
        }
    }
    tracing::trace!("trying to run `ldd --version` to detect glibc version");
    match detect_glibc_version_from_ldd() {
        Ok(os_version) => return Ok(os_version),
        Err(err) => {
            tracing::trace!("tried to find glibc version from `ldd --version`, but failed: {err}");
        }
    }
    let msg = "\
        could not detect either glibc version nor musl libc version, \
        at least one of which is required\
    ";
    Err(PlatformError::OsVersionDetectionError(msg.to_string()))
}

// glibc version is taken from std/sys/unix/os.rs
fn detect_glibc_version_from_ldd() -> Result<Os, PlatformError> {
    let output = Command::new("ldd")
        .args(["--version"])
        .output()
        .map_err(|err| {
            PlatformError::OsVersionDetectionError(format!(
                "failed to execute `ldd --version` for glibc: {err}"
            ))
        })?;
    match glibc_ldd_output_to_version("stdout", &output.stdout) {
        Ok(os) => return Ok(os),
        Err(err) => {
            tracing::trace!("failed to parse glibc version from stdout of `ldd --version`: {err}");
        }
    }
    match glibc_ldd_output_to_version("stderr", &output.stderr) {
        Ok(os) => return Ok(os),
        Err(err) => {
            tracing::trace!("failed to parse glibc version from stderr of `ldd --version`: {err}");
        }
    }
    Err(PlatformError::OsVersionDetectionError(
        "could not find glibc version from stdout or stderr of `ldd --version`".to_string(),
    ))
}

fn glibc_ldd_output_to_version(kind: &str, output: &[u8]) -> Result<Os, PlatformError> {
    static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"ldd \(.+\) ([0-9]+\.[0-9]+)").unwrap());

    let output = std::str::from_utf8(output).map_err(|err| {
        PlatformError::OsVersionDetectionError(format!(
            "failed to parse `ldd --version` {kind} as UTF-8: {err}"
        ))
    })?;
    tracing::trace!("{kind} output from `ldd --version`: {output:?}");
    let Some((_, [version])) = RE.captures(output).map(|c| c.extract()) else {
        return Err(PlatformError::OsVersionDetectionError(
            "failed to detect glibc version on {kind}".to_string(),
        ));
    };
    let Some(os) = parse_glibc_version(version) else {
        return Err(PlatformError::OsVersionDetectionError(format!(
            "failed to parse glibc version on {kind} from: {version:?}",
        )));
    };
    Ok(os)
}

// Returns Some((major, minor)) if the string is a valid "x.y" version,
// ignoring any extra dot-separated parts. Otherwise return None.
fn parse_glibc_version(version: &str) -> Option<Os> {
    let mut parsed_ints = version.split('.').map(str::parse).fuse();
    match (parsed_ints.next(), parsed_ints.next()) {
        (Some(Ok(major)), Some(Ok(minor))) => Some(Os::Manylinux { major, minor }),
        _ => None,
    }
}

fn detect_linux_libc_from_ld_symlink(path: &Path) -> Result<Os, PlatformError> {
    static RE: Lazy<Regex> =
        Lazy::new(|| Regex::new(r"^ld-([0-9]{1,3})\.([0-9]{1,3})\.so$").unwrap());

    let target = fs::read_link(path).map_err(|err| {
        PlatformError::OsVersionDetectionError(format!(
            "failed to read {path:?} as a symbolic link: {err}",
        ))
    })?;
    let Some(filename) = target.file_name() else {
        return Err(PlatformError::OsVersionDetectionError(format!(
            "failed to get base name of symbolic link path {target:?}",
        )));
    };
    let filename = filename.to_string_lossy();
    let Some((_, [major, minor])) = RE.captures(&filename).map(|c| c.extract()) else {
        return Err(PlatformError::OsVersionDetectionError(format!(
            "failed to find major/minor version in dynamic linker symlink \
             filename {filename:?} from its path {target:?} via regex {regex}",
            regex = RE.as_str(),
        )));
    };
    // OK since we are guaranteed to have between 1 and 3 ASCII digits and the
    // maximum possible value, 999, fits into a u16.
    let major = major.parse().expect("valid major version");
    let minor = minor.parse().expect("valid minor version");
    Ok(Os::Manylinux { major, minor })
}

/// Read the musl version from libc library's output. Taken from maturin.
///
/// The libc library should output something like this to `stderr`:
///
/// ```text
/// musl libc (`x86_64`)
/// Version 1.2.2
/// Dynamic Program Loader
/// ```
fn detect_musl_version(ld_path: impl AsRef<Path>) -> Result<Os, PlatformError> {
    let ld_path = ld_path.as_ref();
    let output = Command::new(ld_path)
        .stdout(Stdio::null())
        .stderr(Stdio::piped())
        .output()
        .map_err(|err| {
            PlatformError::OsVersionDetectionError(format!(
                "failed to execute `{ld_path:?}` for musl: {err}"
            ))
        })?;
    match musl_ld_output_to_version("stdout", &output.stdout) {
        Ok(os) => return Ok(os),
        Err(err) => {
            tracing::trace!("failed to parse musl version from stdout of `{ld_path:?}`: {err}");
        }
    }
    match musl_ld_output_to_version("stderr", &output.stderr) {
        Ok(os) => return Ok(os),
        Err(err) => {
            tracing::trace!("failed to parse musl version from stderr of `{ld_path:?}`: {err}");
        }
    }
    Err(PlatformError::OsVersionDetectionError(format!(
        "could not find musl version from stdout or stderr of `{ld_path:?}`",
    )))
}

fn musl_ld_output_to_version(kind: &str, output: &[u8]) -> Result<Os, PlatformError> {
    static RE: Lazy<Regex> =
        Lazy::new(|| Regex::new(r"Version ([0-9]{1,4})\.([0-9]{1,4})").unwrap());

    let output = std::str::from_utf8(output).map_err(|err| {
        PlatformError::OsVersionDetectionError(format!("failed to parse {kind} as UTF-8: {err}"))
    })?;
    tracing::trace!("{kind} output from `ld`: {output:?}");
    let Some((_, [major, minor])) = RE.captures(output).map(|c| c.extract()) else {
        return Err(PlatformError::OsVersionDetectionError(format!(
            "could not find musl version from on {kind} via regex: {}",
            RE.as_str(),
        )));
    };
    // OK since we are guaranteed to have between 1 and 4 ASCII digits and the
    // maximum possible value, 9999, fits into a u16.
    let major = major.parse().expect("valid major version");
    let minor = minor.parse().expect("valid minor version");
    Ok(Os::Musllinux { major, minor })
}

/// Find musl ld path from executable's ELF header.
fn find_ld_path() -> Result<PathBuf, PlatformError> {
    // At first, we just looked for /bin/ls. But on some Linux distros, /bin/ls
    // is a shell script that just calls /usr/bin/ls. So we switched to looking
    // at /bin/sh. But apparently in some environments, /bin/sh is itself just
    // a shell script that calls /bin/dash. So... We just try a few different
    // paths. In most cases, /bin/sh should work.
    //
    // See: https://github.com/astral-sh/uv/pull/1493
    // See: https://github.com/astral-sh/uv/issues/1810
    let attempts = ["/bin/sh", "/bin/dash", "/bin/ls"];
    for path in attempts {
        match find_ld_path_at(path) {
            Ok(ld_path) => return Ok(ld_path),
            Err(err) => {
                tracing::trace!("attempt to find `ld` path at {path} failed: {err}");
            }
        }
    }
    Err(PlatformError::OsVersionDetectionError(format!(
        "Couldn't parse ELF interpreter path out of any of the following paths: {joined}",
        joined = attempts.join(", "),
    )))
}

/// Attempt to find the path to the `ld` executable by
/// ELF parsing the given path. If this fails for any
/// reason, then an error is returned.
fn find_ld_path_at(path: impl AsRef<Path>) -> Result<PathBuf, PlatformError> {
    let path = path.as_ref();
    let buffer = fs::read(path)?;
    let elf = Elf::parse(&buffer).map_err(|err| {
        PlatformError::OsVersionDetectionError(format!(
            "Couldn't parse {path} as an ELF file: {err}",
            path = path.display()
        ))
    })?;
    if let Some(elf_interpreter) = elf.interpreter {
        Ok(PathBuf::from(elf_interpreter))
    } else {
        Err(PlatformError::OsVersionDetectionError(format!(
            "Couldn't find ELF interpreter path from {path}",
            path = path.display()
        )))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_ldd_output() {
        let ver_str = glibc_ldd_output_to_version(
            "stdout",
            br"ldd (GNU libc) 2.12
Copyright (C) 2010 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Written by Roland McGrath and Ulrich Drepper.",
        )
        .unwrap();
        assert_eq!(
            ver_str,
            Os::Manylinux {
                major: 2,
                minor: 12
            }
        );

        let ver_str = glibc_ldd_output_to_version(
            "stderr",
            br"ldd (Ubuntu GLIBC 2.31-0ubuntu9.2) 2.31
Copyright (C) 2020 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Written by Roland McGrath and Ulrich Drepper.",
        )
        .unwrap();
        assert_eq!(
            ver_str,
            Os::Manylinux {
                major: 2,
                minor: 31
            }
        );
    }

    #[test]
    fn parse_musl_ld_output() {
        // This output was generated by running `/lib/ld-musl-x86_64.so.1`
        // in an Alpine Docker image. The Alpine version:
        //
        // # cat /etc/alpine-release
        // 3.19.1
        let output = b"\
musl libc (x86_64)
Version 1.2.4_git20230717
Dynamic Program Loader
Usage: /lib/ld-musl-x86_64.so.1 [options] [--] pathname [args]\
";
        let got = musl_ld_output_to_version("stderr", output).unwrap();
        assert_eq!(got, Os::Musllinux { major: 1, minor: 2 });
    }
}
@@ -1,36 +0,0 @@
use crate::PlatformError;
use serde::Deserialize;

/// Get the macOS version from the SystemVersion.plist file.
pub(crate) fn get_mac_os_version() -> Result<(u16, u16), PlatformError> {
    // This is actually what python does
    // https://github.com/python/cpython/blob/cb2b3c8d3566ae46b3b8d0718019e1c98484589e/Lib/platform.py#L409-L428
    #[derive(Deserialize)]
    #[serde(rename_all = "PascalCase")]
    struct SystemVersion {
        product_version: String,
    }
    let system_version: SystemVersion =
        plist::from_file("/System/Library/CoreServices/SystemVersion.plist")
            .map_err(|err| PlatformError::OsVersionDetectionError(err.to_string()))?;

    let invalid_mac_os_version = || {
        PlatformError::OsVersionDetectionError(format!(
            "Invalid macOS version {}",
            system_version.product_version
        ))
    };
    match system_version
        .product_version
        .split('.')
        .collect::<Vec<&str>>()
        .as_slice()
    {
        [major, minor] | [major, minor, _] => {
            let major = major.parse::<u16>().map_err(|_| invalid_mac_os_version())?;
            let minor = minor.parse::<u16>().map_err(|_| invalid_mac_os_version())?;
            Ok((major, minor))
        }
        _ => Err(invalid_mac_os_version()),
    }
}
@@ -13,7 +13,6 @@ license = { workspace = true }
workspace = true

[dependencies]
platform-host = { path = "../platform-host" }

rustc-hash = { workspace = true }
serde = { workspace = true, features = ["derive"] }
thiserror = { workspace = true }
@ -1,498 +1,5 @@
|
|||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp, num::NonZeroU32};
|
||||
pub use platform::{Arch, Os, Platform, PlatformError};
|
||||
pub use tags::{IncompatibleTag, TagCompatibility, TagPriority, Tags, TagsError};
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use platform_host::{Arch, Os, Platform, PlatformError};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum TagsError {
|
||||
#[error(transparent)]
|
||||
PlatformError(#[from] PlatformError),
|
||||
#[error("Unsupported implementation: {0}")]
|
||||
UnsupportedImplementation(String),
|
||||
#[error("Unknown implementation: {0}")]
|
||||
UnknownImplementation(String),
|
||||
#[error("Invalid priority: {0}")]
|
||||
InvalidPriority(usize, #[source] std::num::TryFromIntError),
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Clone)]
|
||||
pub enum IncompatibleTag {
|
||||
Invalid,
|
||||
Python,
|
||||
Abi,
|
||||
Platform,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum TagCompatibility {
|
||||
Incompatible(IncompatibleTag),
|
||||
Compatible(TagPriority),
|
||||
}
|
||||
|
||||
impl Ord for TagCompatibility {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
match (self, other) {
|
||||
(Self::Compatible(p_self), Self::Compatible(p_other)) => p_self.cmp(p_other),
|
||||
(Self::Incompatible(_), Self::Compatible(_)) => cmp::Ordering::Less,
|
||||
(Self::Compatible(_), Self::Incompatible(_)) => cmp::Ordering::Greater,
|
||||
(Self::Incompatible(t_self), Self::Incompatible(t_other)) => t_self.cmp(t_other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for TagCompatibility {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
|
||||
Some(Self::cmp(self, other))
|
||||
}
|
||||
}
|
||||
|
||||
impl TagCompatibility {
|
||||
pub fn is_compatible(&self) -> bool {
|
||||
matches!(self, Self::Compatible(_))
|
||||
}
|
||||
}
|
||||
|
||||
/// A set of compatible tags for a given Python version and platform.
|
||||
///
|
||||
/// Its principle function is to determine whether the tags for a particular
|
||||
/// wheel are compatible with the current environment.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Tags {
|
||||
/// python_tag |--> abi_tag |--> platform_tag |--> priority
|
||||
#[allow(clippy::type_complexity)]
|
||||
map: Arc<FxHashMap<String, FxHashMap<String, FxHashMap<String, TagPriority>>>>,
|
||||
}
|
||||
|
||||
impl Tags {
|
||||
/// Create a new set of tags.
|
||||
///
|
||||
/// Tags are prioritized based on their position in the given vector. Specifically, tags that
|
||||
/// appear earlier in the vector are given higher priority than tags that appear later.
|
||||
pub fn new(tags: Vec<(String, String, String)>) -> Self {
|
||||
let mut map = FxHashMap::default();
|
||||
for (index, (py, abi, platform)) in tags.into_iter().rev().enumerate() {
|
||||
map.entry(py.to_string())
|
||||
.or_insert(FxHashMap::default())
|
||||
.entry(abi.to_string())
|
||||
.or_insert(FxHashMap::default())
|
||||
.entry(platform.to_string())
|
||||
.or_insert(TagPriority::try_from(index).expect("valid tag priority"));
|
||||
}
|
||||
Self { map: Arc::new(map) }
|
||||
}
|
||||
|
||||
/// Returns the compatible tags for the given Python implementation (e.g., `cpython`), version,
|
||||
/// and platform.
|
||||
pub fn from_env(
|
||||
platform: &Platform,
|
||||
python_version: (u8, u8),
|
||||
implementation_name: &str,
|
||||
implementation_version: (u8, u8),
|
||||
) -> Result<Self, TagsError> {
|
||||
let implementation = Implementation::from_str(implementation_name)?;
|
||||
let platform_tags = compatible_tags(platform)?;
|
||||
|
||||
let mut tags = Vec::with_capacity(5 * platform_tags.len());
|
||||
|
||||
// 1. This exact c api version
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
implementation.language_tag(python_version),
|
||||
implementation.abi_tag(python_version, implementation_version),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
tags.push((
|
||||
implementation.language_tag(python_version),
|
||||
"none".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
// 2. abi3 and no abi (e.g. executable binary)
|
||||
if matches!(implementation, Implementation::CPython) {
|
||||
// For some reason 3.2 is the minimum python for the cp abi
|
||||
for minor in (2..=python_version.1).rev() {
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
implementation.language_tag((python_version.0, minor)),
|
||||
"abi3".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
// 3. no abi (e.g. executable binary)
|
||||
for minor in (0..=python_version.1).rev() {
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
format!("py{}{}", python_version.0, minor),
|
||||
"none".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
// 4. major only
|
||||
for platform_tag in platform_tags {
|
||||
tags.push((
|
||||
format!("py{}", python_version.0),
|
||||
"none".to_string(),
|
||||
platform_tag,
|
||||
));
|
||||
}
|
||||
// 5. no binary
|
||||
for minor in (0..=python_version.1).rev() {
|
||||
tags.push((
|
||||
format!("py{}{}", python_version.0, minor),
|
||||
"none".to_string(),
|
||||
"any".to_string(),
|
||||
));
|
||||
}
|
||||
tags.push((
|
||||
format!("py{}", python_version.0),
|
||||
"none".to_string(),
|
||||
"any".to_string(),
|
||||
));
|
||||
Ok(Self::new(tags))
|
||||
}
|
||||
|
||||
/// Returns true when there exists at least one tag for this platform
|
||||
/// whose individual components all appear in each of the slices given.
|
||||
///
|
||||
/// Like [`Tags::compatibility`], but short-circuits as soon as a compatible
|
||||
/// tag is found.
|
||||
pub fn is_compatible(
|
||||
&self,
|
||||
wheel_python_tags: &[String],
|
||||
wheel_abi_tags: &[String],
|
||||
wheel_platform_tags: &[String],
|
||||
) -> bool {
|
||||
// NOTE: A typical work-load is a context in which the platform tags
|
||||
// are quite large, but the tags of a wheel are quite small. It is
|
||||
// common, for example, for the lengths of the slices given to all be
|
||||
// 1. So while the looping here might look slow, the key thing we want
|
||||
// to avoid is looping over all of the platform tags. We avoid that
|
||||
// with hashmap lookups.
|
||||
|
||||
for wheel_py in wheel_python_tags {
|
||||
let Some(abis) = self.map.get(wheel_py) else {
|
||||
continue;
|
||||
};
|
||||
for wheel_abi in wheel_abi_tags {
|
||||
let Some(platforms) = abis.get(wheel_abi) else {
|
||||
continue;
|
||||
};
|
||||
for wheel_platform in wheel_platform_tags {
|
||||
if platforms.contains_key(wheel_platform) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Returns the [`TagCompatibility`] of the given tags.
|
||||
///
|
||||
/// If compatible, includes the score of the most-compatible platform tag.
|
||||
/// If incompatible, includes the tag part which was a closest match.
|
||||
pub fn compatibility(
|
||||
&self,
|
||||
wheel_python_tags: &[String],
|
||||
wheel_abi_tags: &[String],
|
||||
wheel_platform_tags: &[String],
|
||||
) -> TagCompatibility {
|
||||
let mut max_compatibility = TagCompatibility::Incompatible(IncompatibleTag::Invalid);
|
||||
|
||||
for wheel_py in wheel_python_tags {
|
||||
let Some(abis) = self.map.get(wheel_py) else {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Incompatible(IncompatibleTag::Python));
|
||||
continue;
|
||||
};
|
||||
for wheel_abi in wheel_abi_tags {
|
||||
let Some(platforms) = abis.get(wheel_abi) else {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Incompatible(IncompatibleTag::Abi));
|
||||
continue;
|
||||
};
|
||||
for wheel_platform in wheel_platform_tags {
|
||||
let priority = platforms.get(wheel_platform).copied();
|
||||
if let Some(priority) = priority {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Compatible(priority));
|
||||
} else {
|
||||
max_compatibility = max_compatibility
|
||||
.max(TagCompatibility::Incompatible(IncompatibleTag::Platform));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
max_compatibility
|
||||
}
|
||||
}
|
||||
|
||||
/// The priority of a platform tag.
|
||||
///
|
||||
/// A wrapper around [`NonZeroU32`]. Higher values indicate higher priority.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct TagPriority(NonZeroU32);
|
||||
|
||||
impl TryFrom<usize> for TagPriority {
|
||||
type Error = TagsError;
|
||||
|
||||
/// Create a [`TagPriority`] from a `usize`, where higher `usize` values are given higher
|
||||
/// priority.
|
||||
fn try_from(priority: usize) -> Result<Self, TagsError> {
|
||||
match u32::try_from(priority).and_then(|priority| NonZeroU32::try_from(1 + priority)) {
|
||||
Ok(priority) => Ok(Self(priority)),
|
||||
Err(err) => Err(TagsError::InvalidPriority(priority, err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum Implementation {
|
||||
CPython,
|
||||
PyPy,
|
||||
Pyston,
|
||||
}
|
||||
|
||||
impl Implementation {
|
||||
/// Returns the "language implementation and version tag" for the current implementation and
|
||||
/// Python version (e.g., `cp39` or `pp37`).
|
||||
pub fn language_tag(&self, python_version: (u8, u8)) -> String {
|
||||
match self {
|
||||
// Ex) `cp39`
|
||||
Self::CPython => format!("cp{}{}", python_version.0, python_version.1),
|
||||
// Ex) `pp39`
|
||||
Self::PyPy => format!("pp{}{}", python_version.0, python_version.1),
|
||||
// Ex) `pt38``
|
||||
Self::Pyston => format!("pt{}{}", python_version.0, python_version.1),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn abi_tag(&self, python_version: (u8, u8), implementation_version: (u8, u8)) -> String {
|
||||
match self {
|
||||
// Ex) `cp39`
|
||||
Self::CPython => {
|
||||
if python_version.1 <= 7 {
|
||||
format!("cp{}{}m", python_version.0, python_version.1)
|
||||
} else {
|
||||
format!("cp{}{}", python_version.0, python_version.1)
|
||||
}
|
||||
}
|
||||
// Ex) `pypy39_pp73`
|
||||
Self::PyPy => format!(
|
||||
"pypy{}{}_pp{}{}",
|
||||
python_version.0,
|
||||
python_version.1,
|
||||
implementation_version.0,
|
||||
implementation_version.1
|
||||
),
|
||||
// Ex) `pyston38-pyston_23`
|
||||
Self::Pyston => format!(
|
||||
"pyston{}{}-pyston_{}{}",
|
||||
python_version.0,
|
||||
python_version.1,
|
||||
implementation_version.0,
|
||||
implementation_version.1
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Implementation {
|
||||
type Err = TagsError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, TagsError> {
|
||||
match s {
|
||||
// Known and supported implementations.
|
||||
"cpython" => Ok(Self::CPython),
|
||||
"pypy" => Ok(Self::PyPy),
|
||||
"pyston" => Ok(Self::Pyston),
|
||||
// Known but unsupported implementations.
|
||||
"python" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
"ironpython" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
"jython" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
// Unknown implementations.
|
||||
_ => Err(TagsError::UnknownImplementation(s.to_string())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the compatible tags for the current [`Platform`] (e.g., `manylinux_2_17`,
|
||||
/// `macosx_11_0_arm64`, or `win_amd64`).
|
||||
///
|
||||
/// We have two cases: Actual platform specific tags (including "merged" tags such as universal2)
|
||||
/// and "any".
|
||||
///
|
||||
/// Bit of a mess, needs to be cleaned up.
|
||||
fn compatible_tags(platform: &Platform) -> Result<Vec<String>, PlatformError> {
|
||||
let os = platform.os();
|
||||
let arch = platform.arch();
|
||||
|
||||
let platform_tags = match (&os, arch) {
|
||||
(Os::Manylinux { major, minor }, _) => {
|
||||
let mut platform_tags = vec![format!("linux_{}", arch)];
|
||||
platform_tags.extend(
|
||||
(arch.get_minimum_manylinux_minor()..=*minor)
|
||||
.map(|minor| format!("manylinux_{major}_{minor}_{arch}")),
|
||||
);
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&12) {
|
||||
platform_tags.push(format!("manylinux2010_{arch}"));
|
||||
}
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&17) {
|
||||
platform_tags.push(format!("manylinux2014_{arch}"));
|
||||
}
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&5) {
|
||||
platform_tags.push(format!("manylinux1_{arch}"));
|
||||
}
|
||||
platform_tags
|
||||
}
|
||||
(Os::Musllinux { major, minor }, _) => {
|
||||
let mut platform_tags = vec![format!("linux_{}", arch)];
|
||||
// musl 1.1 is the lowest supported version in musllinux
|
||||
platform_tags
|
||||
.extend((1..=*minor).map(|minor| format!("musllinux_{major}_{minor}_{arch}")));
|
||||
platform_tags
|
||||
}
|
||||
(Os::Macos { major, minor }, Arch::X86_64) => {
|
||||
// Source: https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L346
|
||||
let mut platform_tags = vec![];
|
||||
match major {
|
||||
10 => {
|
||||
// Prior to Mac OS 11, each yearly release of Mac OS bumped the "minor" version
|
||||
// number. The major version was always 10.
|
||||
for minor in (0..=*minor).rev() {
|
||||
for binary_format in get_mac_binary_formats(*major, minor, arch) {
|
||||
platform_tags.push(format!("macosx_{major}_{minor}_{binary_format}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
value if *value >= 11 => {
|
||||
// Starting with Mac OS 11, each yearly release bumps the major version number.
|
||||
// The minor versions are now the midyear updates.
|
||||
for major in (10..=*major).rev() {
|
||||
for binary_format in get_mac_binary_formats(major, 0, arch) {
|
||||
platform_tags.push(format!("macosx_{}_{}_{}", major, 0, binary_format));
|
||||
}
|
||||
}
|
||||
// The "universal2" binary format can have a macOS version earlier than 11.0
|
||||
// when the x86_64 part of the binary supports that version of macOS.
|
||||
for minor in (4..=16).rev() {
|
||||
for binary_format in get_mac_binary_formats(10, minor, arch) {
|
||||
platform_tags
|
||||
.push(format!("macosx_{}_{}_{}", 10, minor, binary_format));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(PlatformError::OsVersionDetectionError(format!(
|
||||
"Unsupported macOS version: {major}",
|
||||
)));
|
||||
}
|
||||
}
|
||||
platform_tags
|
||||
}
|
||||
(Os::Macos { major, .. }, Arch::Aarch64) => {
|
||||
// Source: https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L346
|
||||
let mut platform_tags = vec![];
|
||||
// Starting with Mac OS 11, each yearly release bumps the major version number.
|
||||
// The minor versions are now the midyear updates.
|
||||
for major in (10..=*major).rev() {
|
||||
for binary_format in get_mac_binary_formats(major, 0, arch) {
|
||||
platform_tags.push(format!("macosx_{}_{}_{}", major, 0, binary_format));
|
||||
}
|
||||
}
|
||||
// The "universal2" binary format can have a macOS version earlier than 11.0
|
||||
// when the x86_64 part of the binary supports that version of macOS.
|
||||
platform_tags.extend(
|
||||
(4..=16)
|
||||
.rev()
|
||||
.map(|minor| format!("macosx_{}_{}_universal2", 10, minor)),
|
||||
);
|
||||
platform_tags
|
||||
}
|
||||
(Os::Windows, Arch::X86) => {
|
||||
vec!["win32".to_string()]
|
||||
}
|
||||
(Os::Windows, Arch::X86_64) => {
|
||||
vec!["win_amd64".to_string()]
|
||||
}
|
||||
(Os::Windows, Arch::Aarch64) => vec!["win_arm64".to_string()],
|
||||
(
|
||||
Os::FreeBsd { release }
|
||||
| Os::NetBsd { release }
|
||||
| Os::OpenBsd { release }
|
||||
| Os::Dragonfly { release }
|
||||
| Os::Haiku { release },
|
||||
_,
|
||||
) => {
|
||||
let release = release.replace(['.', '-'], "_");
|
||||
vec![format!(
|
||||
"{}_{}_{}",
|
||||
os.to_string().to_lowercase(),
|
||||
release,
|
||||
arch
|
||||
)]
|
||||
}
|
||||
(Os::Illumos { release, arch }, _) => {
|
||||
// See https://github.com/python/cpython/blob/46c8d915715aa2bd4d697482aa051fe974d440e1/Lib/sysconfig.py#L722-L730
|
||||
if let Some((major, other)) = release.split_once('_') {
|
||||
let major_ver: u64 = major.parse().map_err(|err| {
|
||||
PlatformError::OsVersionDetectionError(format!(
|
||||
"illumos major version is not a number: {err}"
|
||||
))
|
||||
})?;
|
||||
if major_ver >= 5 {
|
||||
// SunOS 5 == Solaris 2
|
||||
let os = "solaris".to_string();
|
||||
let release = format!("{}_{}", major_ver - 3, other);
|
||||
let arch = format!("{arch}_64bit");
|
||||
return Ok(vec![format!("{}_{}_{}", os, release, arch)]);
|
||||
}
|
||||
}
|
||||
|
||||
let os = os.to_string().to_lowercase();
|
||||
vec![format!("{}_{}_{}", os, release, arch)]
|
||||
}
|
||||
_ => {
|
||||
return Err(PlatformError::OsVersionDetectionError(format!(
|
||||
"Unsupported operating system and architecture combination: {os} {arch}"
|
||||
)));
|
||||
}
|
||||
};
|
||||
Ok(platform_tags)
|
||||
}
|
||||
|
||||
/// Determine the appropriate binary formats for a macOS version.
|
||||
/// Source: <https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L314>
|
||||
fn get_mac_binary_formats(major: u16, minor: u16, arch: Arch) -> Vec<String> {
|
||||
let mut formats = vec![match arch {
|
||||
Arch::Aarch64 => "arm64".to_string(),
|
||||
_ => arch.to_string(),
|
||||
}];
|
||||
|
||||
if matches!(arch, Arch::X86_64) {
|
||||
if (major, minor) < (10, 4) {
|
||||
return vec![];
|
||||
}
|
||||
formats.extend([
|
||||
"intel".to_string(),
|
||||
"fat64".to_string(),
|
||||
"fat32".to_string(),
|
||||
]);
|
||||
}
|
||||
|
||||
if matches!(arch, Arch::X86_64 | Arch::Aarch64) {
|
||||
formats.push("universal2".to_string());
|
||||
}
|
||||
|
||||
if matches!(arch, Arch::X86_64) {
|
||||
formats.push("universal".to_string());
|
||||
}
|
||||
|
||||
formats
|
||||
}
|
||||
mod platform;
|
||||
mod tags;
|
||||
|
|
crates/platform-tags/src/platform.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
//! Abstractions for understanding the current platform (operating system and architecture).

use std::{fmt, io};

use serde::{Deserialize, Serialize};
use thiserror::Error;

#[derive(Error, Debug)]
pub enum PlatformError {
    #[error(transparent)]
    IOError(#[from] io::Error),
    #[error("Failed to detect the operating system version: {0}")]
    OsVersionDetectionError(String),
}

#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
pub struct Platform {
    os: Os,
    arch: Arch,
}

impl Platform {
    /// Create a new platform from the given operating system and architecture.
    pub fn new(os: Os, arch: Arch) -> Self {
        Self { os, arch }
    }

    /// Return the platform's operating system.
    pub fn os(&self) -> &Os {
        &self.os
    }

    /// Return the platform's architecture.
    pub fn arch(&self) -> Arch {
        self.arch
    }
}

/// All supported operating systems.
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
#[serde(tag = "name", rename_all = "lowercase")]
pub enum Os {
    Manylinux { major: u16, minor: u16 },
    Musllinux { major: u16, minor: u16 },
    Windows,
    Macos { major: u16, minor: u16 },
    FreeBsd { release: String },
    NetBsd { release: String },
    OpenBsd { release: String },
    Dragonfly { release: String },
    Illumos { release: String, arch: String },
    Haiku { release: String },
}

impl fmt::Display for Os {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::Manylinux { .. } => write!(f, "Manylinux"),
            Self::Musllinux { .. } => write!(f, "Musllinux"),
            Self::Windows => write!(f, "Windows"),
            Self::Macos { .. } => write!(f, "MacOS"),
            Self::FreeBsd { .. } => write!(f, "FreeBSD"),
            Self::NetBsd { .. } => write!(f, "NetBSD"),
            Self::OpenBsd { .. } => write!(f, "OpenBSD"),
            Self::Dragonfly { .. } => write!(f, "DragonFly"),
            Self::Illumos { .. } => write!(f, "Illumos"),
            Self::Haiku { .. } => write!(f, "Haiku"),
        }
    }
}

/// All supported CPU architectures
#[derive(Debug, Clone, Copy, Eq, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum Arch {
    #[serde(alias = "arm64")]
    Aarch64,
    Armv7L,
    Powerpc64Le,
    Powerpc64,
    X86,
    #[serde(alias = "amd64")]
    X86_64,
    S390X,
}

impl fmt::Display for Arch {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::Aarch64 => write!(f, "aarch64"),
            Self::Armv7L => write!(f, "armv7l"),
            Self::Powerpc64Le => write!(f, "ppc64le"),
            Self::Powerpc64 => write!(f, "ppc64"),
            Self::X86 => write!(f, "i686"),
            Self::X86_64 => write!(f, "x86_64"),
            Self::S390X => write!(f, "s390x"),
        }
    }
}

impl Arch {
    /// Returns the oldest possible Manylinux tag for this architecture
    pub fn get_minimum_manylinux_minor(&self) -> u16 {
        match self {
            // manylinux 2014
            Self::Aarch64 | Self::Armv7L | Self::Powerpc64 | Self::Powerpc64Le | Self::S390X => 17,
            // manylinux 1
            Self::X86 | Self::X86_64 => 5,
        }
    }
}
crates/platform-tags/src/tags.rs (new file, 498 lines)
@ -0,0 +1,498 @@
|
|||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp, num::NonZeroU32};
|
||||
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::{Arch, Os, Platform, PlatformError};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum TagsError {
|
||||
#[error(transparent)]
|
||||
PlatformError(#[from] PlatformError),
|
||||
#[error("Unsupported implementation: {0}")]
|
||||
UnsupportedImplementation(String),
|
||||
#[error("Unknown implementation: {0}")]
|
||||
UnknownImplementation(String),
|
||||
#[error("Invalid priority: {0}")]
|
||||
InvalidPriority(usize, #[source] std::num::TryFromIntError),
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Clone)]
|
||||
pub enum IncompatibleTag {
|
||||
Invalid,
|
||||
Python,
|
||||
Abi,
|
||||
Platform,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum TagCompatibility {
|
||||
Incompatible(IncompatibleTag),
|
||||
Compatible(TagPriority),
|
||||
}
|
||||
|
||||
impl Ord for TagCompatibility {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
match (self, other) {
|
||||
(Self::Compatible(p_self), Self::Compatible(p_other)) => p_self.cmp(p_other),
|
||||
(Self::Incompatible(_), Self::Compatible(_)) => cmp::Ordering::Less,
|
||||
(Self::Compatible(_), Self::Incompatible(_)) => cmp::Ordering::Greater,
|
||||
(Self::Incompatible(t_self), Self::Incompatible(t_other)) => t_self.cmp(t_other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for TagCompatibility {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
|
||||
Some(Self::cmp(self, other))
|
||||
}
|
||||
}
|
||||
|
||||
impl TagCompatibility {
|
||||
pub fn is_compatible(&self) -> bool {
|
||||
matches!(self, Self::Compatible(_))
|
||||
}
|
||||
}
|
||||
|
||||
/// A set of compatible tags for a given Python version and platform.
|
||||
///
|
||||
/// Its principle function is to determine whether the tags for a particular
|
||||
/// wheel are compatible with the current environment.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Tags {
|
||||
/// python_tag |--> abi_tag |--> platform_tag |--> priority
|
||||
#[allow(clippy::type_complexity)]
|
||||
map: Arc<FxHashMap<String, FxHashMap<String, FxHashMap<String, TagPriority>>>>,
|
||||
}
|
||||
|
||||
impl Tags {
|
||||
/// Create a new set of tags.
|
||||
///
|
||||
/// Tags are prioritized based on their position in the given vector. Specifically, tags that
|
||||
/// appear earlier in the vector are given higher priority than tags that appear later.
|
||||
pub fn new(tags: Vec<(String, String, String)>) -> Self {
|
||||
let mut map = FxHashMap::default();
|
||||
for (index, (py, abi, platform)) in tags.into_iter().rev().enumerate() {
|
||||
map.entry(py.to_string())
|
||||
.or_insert(FxHashMap::default())
|
||||
.entry(abi.to_string())
|
||||
.or_insert(FxHashMap::default())
|
||||
.entry(platform.to_string())
|
||||
.or_insert(TagPriority::try_from(index).expect("valid tag priority"));
|
||||
}
|
||||
Self { map: Arc::new(map) }
|
||||
}
|
||||
|
||||
/// Returns the compatible tags for the given Python implementation (e.g., `cpython`), version,
|
||||
/// and platform.
|
||||
pub fn from_env(
|
||||
platform: &Platform,
|
||||
python_version: (u8, u8),
|
||||
implementation_name: &str,
|
||||
implementation_version: (u8, u8),
|
||||
) -> Result<Self, TagsError> {
|
||||
let implementation = Implementation::from_str(implementation_name)?;
|
||||
let platform_tags = compatible_tags(platform)?;
|
||||
|
||||
let mut tags = Vec::with_capacity(5 * platform_tags.len());
|
||||
|
||||
// 1. This exact c api version
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
implementation.language_tag(python_version),
|
||||
implementation.abi_tag(python_version, implementation_version),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
tags.push((
|
||||
implementation.language_tag(python_version),
|
||||
"none".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
// 2. abi3 and no abi (e.g. executable binary)
|
||||
if matches!(implementation, Implementation::CPython) {
|
||||
// For some reason 3.2 is the minimum python for the cp abi
|
||||
for minor in (2..=python_version.1).rev() {
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
implementation.language_tag((python_version.0, minor)),
|
||||
"abi3".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
// 3. no abi (e.g. executable binary)
|
||||
for minor in (0..=python_version.1).rev() {
|
||||
for platform_tag in &platform_tags {
|
||||
tags.push((
|
||||
format!("py{}{}", python_version.0, minor),
|
||||
"none".to_string(),
|
||||
platform_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
// 4. major only
|
||||
for platform_tag in platform_tags {
|
||||
tags.push((
|
||||
format!("py{}", python_version.0),
|
||||
"none".to_string(),
|
||||
platform_tag,
|
||||
));
|
||||
}
|
||||
// 5. no binary
|
||||
for minor in (0..=python_version.1).rev() {
|
||||
tags.push((
|
||||
format!("py{}{}", python_version.0, minor),
|
||||
"none".to_string(),
|
||||
"any".to_string(),
|
||||
));
|
||||
}
|
||||
tags.push((
|
||||
format!("py{}", python_version.0),
|
||||
"none".to_string(),
|
||||
"any".to_string(),
|
||||
));
|
||||
Ok(Self::new(tags))
|
||||
}
|
||||
|
||||
/// Returns true when there exists at least one tag for this platform
|
||||
/// whose individual components all appear in each of the slices given.
|
||||
///
|
||||
/// Like [`Tags::compatibility`], but short-circuits as soon as a compatible
|
||||
/// tag is found.
|
||||
pub fn is_compatible(
|
||||
&self,
|
||||
wheel_python_tags: &[String],
|
||||
wheel_abi_tags: &[String],
|
||||
wheel_platform_tags: &[String],
|
||||
) -> bool {
|
||||
// NOTE: A typical work-load is a context in which the platform tags
|
||||
// are quite large, but the tags of a wheel are quite small. It is
|
||||
// common, for example, for the lengths of the slices given to all be
|
||||
// 1. So while the looping here might look slow, the key thing we want
|
||||
// to avoid is looping over all of the platform tags. We avoid that
|
||||
// with hashmap lookups.
|
||||
|
||||
for wheel_py in wheel_python_tags {
|
||||
let Some(abis) = self.map.get(wheel_py) else {
|
||||
continue;
|
||||
};
|
||||
for wheel_abi in wheel_abi_tags {
|
||||
let Some(platforms) = abis.get(wheel_abi) else {
|
||||
continue;
|
||||
};
|
||||
for wheel_platform in wheel_platform_tags {
|
||||
if platforms.contains_key(wheel_platform) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Returns the [`TagCompatibility`] of the given tags.
|
||||
///
|
||||
/// If compatible, includes the score of the most-compatible platform tag.
|
||||
/// If incompatible, includes the tag part which was a closest match.
|
||||
pub fn compatibility(
|
||||
&self,
|
||||
wheel_python_tags: &[String],
|
||||
wheel_abi_tags: &[String],
|
||||
wheel_platform_tags: &[String],
|
||||
) -> TagCompatibility {
|
||||
let mut max_compatibility = TagCompatibility::Incompatible(IncompatibleTag::Invalid);
|
||||
|
||||
for wheel_py in wheel_python_tags {
|
||||
let Some(abis) = self.map.get(wheel_py) else {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Incompatible(IncompatibleTag::Python));
|
||||
continue;
|
||||
};
|
||||
for wheel_abi in wheel_abi_tags {
|
||||
let Some(platforms) = abis.get(wheel_abi) else {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Incompatible(IncompatibleTag::Abi));
|
||||
continue;
|
||||
};
|
||||
for wheel_platform in wheel_platform_tags {
|
||||
let priority = platforms.get(wheel_platform).copied();
|
||||
if let Some(priority) = priority {
|
||||
max_compatibility =
|
||||
max_compatibility.max(TagCompatibility::Compatible(priority));
|
||||
} else {
|
||||
max_compatibility = max_compatibility
|
||||
.max(TagCompatibility::Incompatible(IncompatibleTag::Platform));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
max_compatibility
|
||||
}
|
||||
}
|
||||
|
||||
/// The priority of a platform tag.
|
||||
///
|
||||
/// A wrapper around [`NonZeroU32`]. Higher values indicate higher priority.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct TagPriority(NonZeroU32);
|
||||
|
||||
impl TryFrom<usize> for TagPriority {
|
||||
type Error = TagsError;
|
||||
|
||||
/// Create a [`TagPriority`] from a `usize`, where higher `usize` values are given higher
|
||||
/// priority.
|
||||
fn try_from(priority: usize) -> Result<Self, TagsError> {
|
||||
match u32::try_from(priority).and_then(|priority| NonZeroU32::try_from(1 + priority)) {
|
||||
Ok(priority) => Ok(Self(priority)),
|
||||
Err(err) => Err(TagsError::InvalidPriority(priority, err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
enum Implementation {
|
||||
CPython,
|
||||
PyPy,
|
||||
Pyston,
|
||||
}
|
||||
|
||||
impl Implementation {
|
||||
/// Returns the "language implementation and version tag" for the current implementation and
|
||||
/// Python version (e.g., `cp39` or `pp37`).
|
||||
fn language_tag(self, python_version: (u8, u8)) -> String {
|
||||
match self {
|
||||
// Ex) `cp39`
|
||||
Self::CPython => format!("cp{}{}", python_version.0, python_version.1),
|
||||
// Ex) `pp39`
|
||||
Self::PyPy => format!("pp{}{}", python_version.0, python_version.1),
|
||||
// Ex) `pt38``
|
||||
Self::Pyston => format!("pt{}{}", python_version.0, python_version.1),
|
||||
}
|
||||
}
|
||||
|
||||
fn abi_tag(self, python_version: (u8, u8), implementation_version: (u8, u8)) -> String {
|
||||
match self {
|
||||
// Ex) `cp39`
|
||||
Self::CPython => {
|
||||
if python_version.1 <= 7 {
|
||||
format!("cp{}{}m", python_version.0, python_version.1)
|
||||
} else {
|
||||
format!("cp{}{}", python_version.0, python_version.1)
|
||||
}
|
||||
}
|
||||
// Ex) `pypy39_pp73`
|
||||
Self::PyPy => format!(
|
||||
"pypy{}{}_pp{}{}",
|
||||
python_version.0,
|
||||
python_version.1,
|
||||
implementation_version.0,
|
||||
implementation_version.1
|
||||
),
|
||||
// Ex) `pyston38-pyston_23`
|
||||
Self::Pyston => format!(
|
||||
"pyston{}{}-pyston_{}{}",
|
||||
python_version.0,
|
||||
python_version.1,
|
||||
implementation_version.0,
|
||||
implementation_version.1
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Implementation {
|
||||
type Err = TagsError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, TagsError> {
|
||||
match s {
|
||||
// Known and supported implementations.
|
||||
"cpython" => Ok(Self::CPython),
|
||||
"pypy" => Ok(Self::PyPy),
|
||||
"pyston" => Ok(Self::Pyston),
|
||||
// Known but unsupported implementations.
|
||||
"python" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
"ironpython" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
"jython" => Err(TagsError::UnsupportedImplementation(s.to_string())),
|
||||
// Unknown implementations.
|
||||
_ => Err(TagsError::UnknownImplementation(s.to_string())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the compatible tags for the current [`Platform`] (e.g., `manylinux_2_17`,
|
||||
/// `macosx_11_0_arm64`, or `win_amd64`).
|
||||
///
|
||||
/// We have two cases: Actual platform specific tags (including "merged" tags such as universal2)
|
||||
/// and "any".
|
||||
///
|
||||
/// Bit of a mess, needs to be cleaned up.
|
||||
fn compatible_tags(platform: &Platform) -> Result<Vec<String>, PlatformError> {
|
||||
let os = platform.os();
|
||||
let arch = platform.arch();
|
||||
|
||||
let platform_tags = match (&os, arch) {
|
||||
(Os::Manylinux { major, minor }, _) => {
|
||||
let mut platform_tags = vec![format!("linux_{}", arch)];
|
||||
platform_tags.extend(
|
||||
(arch.get_minimum_manylinux_minor()..=*minor)
|
||||
.map(|minor| format!("manylinux_{major}_{minor}_{arch}")),
|
||||
);
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&12) {
|
||||
platform_tags.push(format!("manylinux2010_{arch}"));
|
||||
}
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&17) {
|
||||
platform_tags.push(format!("manylinux2014_{arch}"));
|
||||
}
|
||||
if (arch.get_minimum_manylinux_minor()..=*minor).contains(&5) {
|
||||
platform_tags.push(format!("manylinux1_{arch}"));
|
||||
}
|
||||
platform_tags
|
||||
}
|
||||
(Os::Musllinux { major, minor }, _) => {
|
||||
let mut platform_tags = vec![format!("linux_{}", arch)];
|
||||
// musl 1.1 is the lowest supported version in musllinux
|
||||
platform_tags
|
||||
.extend((1..=*minor).map(|minor| format!("musllinux_{major}_{minor}_{arch}")));
|
||||
platform_tags
|
||||
}
|
||||
(Os::Macos { major, minor }, Arch::X86_64) => {
|
||||
// Source: https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L346
|
||||
let mut platform_tags = vec![];
|
||||
match major {
|
||||
10 => {
|
||||
// Prior to Mac OS 11, each yearly release of Mac OS bumped the "minor" version
|
||||
// number. The major version was always 10.
|
||||
for minor in (0..=*minor).rev() {
|
||||
for binary_format in get_mac_binary_formats(*major, minor, arch) {
|
||||
platform_tags.push(format!("macosx_{major}_{minor}_{binary_format}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
value if *value >= 11 => {
|
||||
// Starting with Mac OS 11, each yearly release bumps the major version number.
|
||||
// The minor versions are now the midyear updates.
|
||||
for major in (10..=*major).rev() {
|
||||
for binary_format in get_mac_binary_formats(major, 0, arch) {
|
||||
platform_tags.push(format!("macosx_{}_{}_{}", major, 0, binary_format));
|
||||
}
|
||||
}
|
||||
// The "universal2" binary format can have a macOS version earlier than 11.0
|
||||
// when the x86_64 part of the binary supports that version of macOS.
|
||||
for minor in (4..=16).rev() {
|
||||
for binary_format in get_mac_binary_formats(10, minor, arch) {
|
||||
platform_tags
|
||||
.push(format!("macosx_{}_{}_{}", 10, minor, binary_format));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(PlatformError::OsVersionDetectionError(format!(
|
||||
"Unsupported macOS version: {major}",
|
||||
)));
|
||||
}
|
||||
}
|
||||
platform_tags
|
||||
}
|
||||
(Os::Macos { major, .. }, Arch::Aarch64) => {
|
||||
// Source: https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L346
|
||||
let mut platform_tags = vec![];
|
||||
// Starting with Mac OS 11, each yearly release bumps the major version number.
|
||||
// The minor versions are now the midyear updates.
|
||||
for major in (10..=*major).rev() {
|
||||
for binary_format in get_mac_binary_formats(major, 0, arch) {
|
||||
platform_tags.push(format!("macosx_{}_{}_{}", major, 0, binary_format));
|
||||
}
|
||||
}
|
||||
// The "universal2" binary format can have a macOS version earlier than 11.0
|
||||
// when the x86_64 part of the binary supports that version of macOS.
|
||||
platform_tags.extend(
|
||||
(4..=16)
|
||||
.rev()
|
||||
.map(|minor| format!("macosx_{}_{}_universal2", 10, minor)),
|
||||
);
|
||||
platform_tags
|
||||
}
|
||||
(Os::Windows, Arch::X86) => {
|
||||
vec!["win32".to_string()]
|
||||
}
|
||||
(Os::Windows, Arch::X86_64) => {
|
||||
vec!["win_amd64".to_string()]
|
||||
}
|
||||
(Os::Windows, Arch::Aarch64) => vec!["win_arm64".to_string()],
|
||||
(
|
||||
Os::FreeBsd { release }
|
||||
| Os::NetBsd { release }
|
||||
| Os::OpenBsd { release }
|
||||
| Os::Dragonfly { release }
|
||||
| Os::Haiku { release },
|
||||
_,
|
||||
) => {
|
||||
let release = release.replace(['.', '-'], "_");
|
||||
vec![format!(
|
||||
"{}_{}_{}",
|
||||
os.to_string().to_lowercase(),
|
||||
release,
|
||||
arch
|
||||
)]
|
||||
}
|
||||
(Os::Illumos { release, arch }, _) => {
|
||||
// See https://github.com/python/cpython/blob/46c8d915715aa2bd4d697482aa051fe974d440e1/Lib/sysconfig.py#L722-L730
|
||||
if let Some((major, other)) = release.split_once('_') {
|
||||
let major_ver: u64 = major.parse().map_err(|err| {
|
||||
PlatformError::OsVersionDetectionError(format!(
|
||||
"illumos major version is not a number: {err}"
|
||||
))
|
||||
})?;
|
||||
if major_ver >= 5 {
|
||||
// SunOS 5 == Solaris 2
|
||||
let os = "solaris".to_string();
|
||||
let release = format!("{}_{}", major_ver - 3, other);
|
||||
let arch = format!("{arch}_64bit");
|
||||
return Ok(vec![format!("{}_{}_{}", os, release, arch)]);
|
||||
}
|
||||
}
|
||||
|
||||
let os = os.to_string().to_lowercase();
|
||||
vec![format!("{}_{}_{}", os, release, arch)]
|
||||
}
|
||||
_ => {
|
||||
return Err(PlatformError::OsVersionDetectionError(format!(
|
||||
"Unsupported operating system and architecture combination: {os} {arch}"
|
||||
)));
|
||||
}
|
||||
};
|
||||
Ok(platform_tags)
|
||||
}
|
||||
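To make the manylinux branch concrete, here is a small Python sketch of the tag list it builds for `x86_64` with glibc 2.17, assuming (as the code above does for x86_64) a minimum manylinux minor of 5; this is an illustration, not the Rust implementation:

```python
# Illustrative expansion of the manylinux branch above for x86_64 / glibc 2.17.
arch, major, minor, minimum = "x86_64", 2, 17, 5
tags = [f"linux_{arch}"]
tags += [f"manylinux_{major}_{m}_{arch}" for m in range(minimum, minor + 1)]
# Legacy aliases are added whenever their glibc minor falls in the supported range.
for legacy_minor, alias in [(12, "manylinux2010"), (17, "manylinux2014"), (5, "manylinux1")]:
    if minimum <= legacy_minor <= minor:
        tags.append(f"{alias}_{arch}")
assert "manylinux2014_x86_64" in tags and "manylinux1_x86_64" in tags
```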
|
||||
/// Determine the appropriate binary formats for a macOS version.
|
||||
/// Source: <https://github.com/pypa/packaging/blob/fd4f11139d1c884a637be8aa26bb60a31fbc9411/packaging/tags.py#L314>
|
||||
fn get_mac_binary_formats(major: u16, minor: u16, arch: Arch) -> Vec<String> {
|
||||
let mut formats = vec![match arch {
|
||||
Arch::Aarch64 => "arm64".to_string(),
|
||||
_ => arch.to_string(),
|
||||
}];
|
||||
|
||||
if matches!(arch, Arch::X86_64) {
|
||||
if (major, minor) < (10, 4) {
|
||||
return vec![];
|
||||
}
|
||||
formats.extend([
|
||||
"intel".to_string(),
|
||||
"fat64".to_string(),
|
||||
"fat32".to_string(),
|
||||
]);
|
||||
}
|
||||
|
||||
if matches!(arch, Arch::X86_64 | Arch::Aarch64) {
|
||||
formats.push("universal2".to_string());
|
||||
}
|
||||
|
||||
if matches!(arch, Arch::X86_64) {
|
||||
formats.push("universal".to_string());
|
||||
}
|
||||
|
||||
formats
|
||||
}
|
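The resulting binary-format lists can be summarized as follows (a sketch of the function's output under the branches above, illustrative only):

```python
# Expected outputs of get_mac_binary_formats for the two supported architectures.
aarch64_formats = ["arm64", "universal2"]
x86_64_formats = ["x86_64", "intel", "fat64", "fat32", "universal2", "universal"]
# For x86_64 on macOS versions before 10.4 the function returns an empty list instead.
```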
|
@ -16,7 +16,6 @@ workspace = true
|
|||
[dependencies]
|
||||
distribution-types = { path = "../distribution-types" }
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
platform-host = { path = "../platform-host" }
|
||||
pypi-types = { path = "../pypi-types" }
|
||||
uv-fs = { path = "../uv-fs" }
|
||||
uv-interpreter = { path = "../uv-interpreter" }
|
||||
|
|
|
@ -21,7 +21,6 @@ distribution-types = { path = "../distribution-types" }
|
|||
install-wheel-rs = { path = "../install-wheel-rs" }
|
||||
pep440_rs = { path = "../pep440-rs" }
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
platform-host = { path = "../platform-host" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
pypi-types = { path = "../pypi-types" }
|
||||
uv-build = { path = "../uv-build" }
|
||||
|
|
|
@ -6,7 +6,6 @@ use clap::Parser;
|
|||
use fs_err as fs;
|
||||
|
||||
use distribution_types::IndexLocations;
|
||||
use platform_host::Platform;
|
||||
use rustc_hash::FxHashMap;
|
||||
use uv_build::{SourceBuild, SourceBuildContext};
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
|
@ -56,8 +55,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
|
|||
|
||||
let cache = Cache::try_from(args.cache_args)?;
|
||||
|
||||
let platform = Platform::current()?;
|
||||
let venv = PythonEnvironment::from_virtualenv(platform, &cache)?;
|
||||
let venv = PythonEnvironment::from_virtualenv(&cache)?;
|
||||
let client = RegistryClientBuilder::new(cache.clone()).build();
|
||||
let index_urls = IndexLocations::default();
|
||||
let flat_index = FlatIndex::default();
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use clap::Parser;
|
||||
use platform_host::Platform;
|
||||
use tracing::info;
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_interpreter::PythonEnvironment;
|
||||
|
@ -21,8 +20,7 @@ pub(crate) async fn compile(args: CompileArgs) -> anyhow::Result<()> {
|
|||
let interpreter = if let Some(python) = args.python {
|
||||
python
|
||||
} else {
|
||||
let platform = Platform::current()?;
|
||||
let venv = PythonEnvironment::from_virtualenv(platform, &cache)?;
|
||||
let venv = PythonEnvironment::from_virtualenv(&cache)?;
|
||||
venv.python_executable().to_path_buf()
|
||||
};
|
||||
|
||||
|
|
|
@ -14,7 +14,6 @@ use distribution_types::{
|
|||
};
|
||||
use install_wheel_rs::linker::LinkMode;
|
||||
use pep508_rs::Requirement;
|
||||
use platform_host::Platform;
|
||||
use platform_tags::Tags;
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, RegistryClient, RegistryClientBuilder};
|
||||
|
@ -55,8 +54,7 @@ pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
|
|||
info!("Got {} requirements", requirements.len());
|
||||
|
||||
let cache = Cache::try_from(args.cache_args)?;
|
||||
let platform = Platform::current()?;
|
||||
let venv = PythonEnvironment::from_virtualenv(platform, &cache)?;
|
||||
let venv = PythonEnvironment::from_virtualenv(&cache)?;
|
||||
let client = RegistryClientBuilder::new(cache.clone()).build();
|
||||
let index_locations = IndexLocations::default();
|
||||
let flat_index = FlatIndex::default();
|
||||
|
|
|
@ -11,7 +11,6 @@ use petgraph::dot::{Config as DotConfig, Dot};
|
|||
|
||||
use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl, Resolution};
|
||||
use pep508_rs::Requirement;
|
||||
use platform_host::Platform;
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
|
@ -54,8 +53,7 @@ pub(crate) struct ResolveCliArgs {
|
|||
pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
|
||||
let cache = Cache::try_from(args.cache_args)?;
|
||||
|
||||
let platform = Platform::current()?;
|
||||
let venv = PythonEnvironment::from_virtualenv(platform, &cache)?;
|
||||
let venv = PythonEnvironment::from_virtualenv(&cache)?;
|
||||
let index_locations =
|
||||
IndexLocations::new(args.index_url, args.extra_index_url, args.find_links, false);
|
||||
let client = RegistryClientBuilder::new(cache.clone())
|
||||
|
|
|
@ -13,7 +13,6 @@ use tracing_indicatif::span_ext::IndicatifSpanExt;
|
|||
use distribution_types::IndexLocations;
|
||||
use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers};
|
||||
use pep508_rs::{Requirement, VersionOrUrl};
|
||||
use platform_host::Platform;
|
||||
use uv_cache::{Cache, CacheArgs};
|
||||
use uv_client::{FlatIndex, OwnedArchive, RegistryClient, RegistryClientBuilder};
|
||||
use uv_dispatch::BuildDispatch;
|
||||
|
@ -72,8 +71,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
|
|||
};
|
||||
let total = requirements.len();
|
||||
|
||||
let platform = Platform::current()?;
|
||||
let venv = PythonEnvironment::from_virtualenv(platform, &cache)?;
|
||||
let venv = PythonEnvironment::from_virtualenv(&cache)?;
|
||||
let in_flight = InFlight::default();
|
||||
let client = RegistryClientBuilder::new(cache.clone()).build();
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ workspace = true
|
|||
[dependencies]
|
||||
distribution-types = { path = "../distribution-types" }
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
platform-host = { path = "../platform-host" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
pypi-types = { path = "../pypi-types" }
|
||||
uv-build = { path = "../uv-build" }
|
||||
|
|
|
@ -17,7 +17,6 @@ cache-key = { path = "../cache-key" }
|
|||
install-wheel-rs = { path = "../install-wheel-rs" }
|
||||
pep440_rs = { path = "../pep440-rs" }
|
||||
pep508_rs = { path = "../pep508-rs", features = ["serde"] }
|
||||
platform-host = { path = "../platform-host" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
pypi-types = { path = "../pypi-types" }
|
||||
uv-cache = { path = "../uv-cache" }
|
||||
|
@ -31,10 +30,11 @@ rmp-serde = { workspace = true }
|
|||
same-file = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
which = { workspace = true}
|
||||
which = { workspace = true }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
winapi = { workspace = true }
|
||||
|
|
0
crates/uv-interpreter/python/__init__.py
Normal file
|
@ -1,10 +1,7 @@
|
|||
"""
|
||||
Queries information about the current Python interpreter and prints it as JSON.
|
||||
|
||||
Exit Codes:
|
||||
0: Success
|
||||
1: General failure
|
||||
3: Python version 3 or newer is required
|
||||
The script will exit with status 0 on known errors that are turned into Rust errors.
|
||||
"""
|
||||
|
||||
import json
|
||||
|
@ -23,8 +20,8 @@ def format_full_version(info):
|
|||
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
sys.exit(3)
|
||||
|
||||
print(json.dumps({"result": "error", "kind": "unsupported_python_version"}))
|
||||
sys.exit(0)
|
||||
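A caller consuming this output only needs to branch on the `result` field. The following Python sketch shows the convention with two hypothetical payloads; it is illustrative and not the actual Rust-side handling:

```python
import json

# Two example payloads in the script's "result" convention (illustrative only):
ok = json.loads('{"result": "success", "markers": {}}')
err = json.loads('{"result": "error", "kind": "unsupported_python_version"}')

def check(info: dict) -> dict:
    if info.get("result") != "success":
        raise RuntimeError(f"interpreter query failed: {info.get('kind')}")
    return info

check(ok)      # passes
# check(err)   # would raise: interpreter query failed: unsupported_python_version
```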
|
||||
if hasattr(sys, "implementation"):
|
||||
implementation_version = format_full_version(sys.implementation.version)
|
||||
|
@ -373,9 +370,11 @@ def get_scheme():
|
|||
# finalize_options(); we only want to override here if the user
|
||||
# has explicitly requested it hence going back to the config
|
||||
if "install_lib" in d.get_option_dict("install"):
|
||||
# noinspection PyUnresolvedReferences
|
||||
scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
|
||||
|
||||
if running_under_virtualenv():
|
||||
# noinspection PyUnresolvedReferences
|
||||
scheme["headers"] = os.path.join(
|
||||
i.prefix,
|
||||
"include",
|
||||
|
@ -406,6 +405,91 @@ def get_scheme():
|
|||
return get_distutils_scheme()
|
||||
|
||||
|
||||
def get_operating_system_and_architecture():
|
||||
"""Determine the Python interpreter architecture and operating system.
|
||||
|
||||
This can differ from uv's architecture and operating system. For example, Apple
|
||||
Silicon Macs can run both x86_64 and aarch64 binaries transparently.
|
||||
"""
|
||||
# https://github.com/pypa/packaging/blob/cc938f984bbbe43c5734b9656c9837ab3a28191f/src/packaging/_musllinux.py#L84
|
||||
# Note that this is not `os.name`.
|
||||
[operating_system, version_arch] = sysconfig.get_platform().split("-", 1)
|
||||
if "-" in version_arch:
|
||||
# Ex: macosx-11.2-arm64
|
||||
version, architecture = version_arch.rsplit("-", 1)
|
||||
else:
|
||||
# Ex: linux-x86_64
|
||||
version = None
|
||||
architecture = version_arch
|
||||
|
||||
if operating_system == "linux":
|
||||
# noinspection PyProtectedMember
|
||||
from .packaging._manylinux import _get_glibc_version
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
from .packaging._musllinux import _get_musl_version
|
||||
|
||||
musl_version = _get_musl_version(sys.executable)
|
||||
glibc_version = _get_glibc_version()
|
||||
if musl_version:
|
||||
operating_system = {
|
||||
"name": "musllinux",
|
||||
"major": musl_version[0],
|
||||
"minor": musl_version[1],
|
||||
}
|
||||
elif glibc_version:
|
||||
operating_system = {
|
||||
"name": "manylinux",
|
||||
"major": glibc_version[0],
|
||||
"minor": glibc_version[1],
|
||||
}
|
||||
else:
|
||||
print(json.dumps({"result": "error", "kind": "libc_not_found"}))
|
||||
sys.exit(0)
|
||||
elif operating_system == "win":
|
||||
operating_system = {
|
||||
"name": "windows",
|
||||
}
|
||||
elif operating_system == "macosx":
|
||||
# GitHub Actions python seems to be doing this.
|
||||
if architecture == "universal2":
|
||||
if platform.processor() == "arm":
|
||||
architecture = "aarch64"
|
||||
else:
|
||||
architecture = platform.processor()
|
||||
version = platform.mac_ver()[0].split(".")
|
||||
operating_system = {
|
||||
"name": "macos",
|
||||
"major": int(version[0]),
|
||||
"minor": int(version[1]),
|
||||
}
|
||||
elif operating_system in [
|
||||
"freebsd",
|
||||
"netbsd",
|
||||
"openbsd",
|
||||
"dragonfly",
|
||||
"illumos",
|
||||
"haiku",
|
||||
]:
|
||||
version = platform.mac_ver()[0].split(".")
|
||||
operating_system = {
|
||||
"name": operating_system,
|
||||
"release": version,
|
||||
}
|
||||
else:
|
||||
print(
|
||||
json.dumps(
|
||||
{
|
||||
"result": "error",
|
||||
"kind": "unknown_operating_system",
|
||||
"operating_system": operating_system,
|
||||
}
|
||||
)
|
||||
)
|
||||
sys.exit(0)
|
||||
return {"os": operating_system, "arch": architecture}
|
||||
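For illustration, two representative return values of `get_operating_system_and_architecture` under the branches above (the concrete numbers are assumed examples):

```python
# Representative return values (illustrative only):
glibc_linux = {
    "os": {"name": "manylinux", "major": 2, "minor": 35},
    "arch": "x86_64",
}
apple_silicon = {
    "os": {"name": "macos", "major": 14, "minor": 2},
    "arch": "aarch64",
}
```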
|
||||
|
||||
markers = {
|
||||
"implementation_name": implementation_name,
|
||||
"implementation_version": implementation_version,
|
||||
|
@ -420,6 +504,7 @@ markers = {
|
|||
"sys_platform": sys.platform,
|
||||
}
|
||||
interpreter_info = {
|
||||
"result": "success",
|
||||
"markers": markers,
|
||||
"base_prefix": sys.base_prefix,
|
||||
"base_exec_prefix": sys.base_exec_prefix,
|
||||
|
@ -429,5 +514,6 @@ interpreter_info = {
|
|||
"stdlib": sysconfig.get_path("stdlib"),
|
||||
"scheme": get_scheme(),
|
||||
"virtualenv": get_virtualenv(),
|
||||
"platform": get_operating_system_and_architecture(),
|
||||
}
|
||||
print(json.dumps(interpreter_info))
|
177
crates/uv-interpreter/python/packaging/LICENSE.APACHE
Normal file
|
@ -0,0 +1,177 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
crates/uv-interpreter/python/packaging/LICENSE.BSD
Normal file
|
@ -0,0 +1,23 @@
|
|||
Copyright (c) Donald Stufft and individual contributors.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
6
crates/uv-interpreter/python/packaging/README.md
Normal file
|
@ -0,0 +1,6 @@
|
|||
# `pypa/packaging`
|
||||
|
||||
This directory contains vendored [pypa/packaging](https://github.com/pypa/packaging) modules as of
|
||||
[cc938f984bbbe43c5734b9656c9837ab3a28191f](https://github.com/pypa/packaging/tree/cc938f984bbbe43c5734b9656c9837ab3a28191f/src/packaging).
|
||||
|
||||
The files are licensed under BSD-2-Clause OR Apache-2.0.
|
15
crates/uv-interpreter/python/packaging/__init__.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
__title__ = "packaging"
|
||||
__summary__ = "Core utilities for Python packages"
|
||||
__uri__ = "https://github.com/pypa/packaging"
|
||||
|
||||
__version__ = "24.1.dev0"
|
||||
|
||||
__author__ = "Donald Stufft and individual contributors"
|
||||
__email__ = "donald@stufft.io"
|
||||
|
||||
__license__ = "BSD-2-Clause or Apache-2.0"
|
||||
__copyright__ = "2014 %s" % __author__
|
110
crates/uv-interpreter/python/packaging/_elffile.py
Normal file
|
@ -0,0 +1,110 @@
|
|||
"""
|
||||
ELF file parser.
|
||||
|
||||
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
||||
interface to ``ZipFile``. Only the read interface is implemented.
|
||||
|
||||
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
||||
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import enum
|
||||
import os
|
||||
import struct
|
||||
from typing import IO
|
||||
|
||||
|
||||
class ELFInvalid(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
class EIClass(enum.IntEnum):
|
||||
C32 = 1
|
||||
C64 = 2
|
||||
|
||||
|
||||
class EIData(enum.IntEnum):
|
||||
Lsb = 1
|
||||
Msb = 2
|
||||
|
||||
|
||||
class EMachine(enum.IntEnum):
|
||||
I386 = 3
|
||||
S390 = 22
|
||||
Arm = 40
|
||||
X8664 = 62
|
||||
AArc64 = 183
|
||||
|
||||
|
||||
class ELFFile:
|
||||
"""
|
||||
Representation of an ELF executable.
|
||||
"""
|
||||
|
||||
def __init__(self, f: IO[bytes]) -> None:
|
||||
self._f = f
|
||||
|
||||
try:
|
||||
ident = self._read("16B")
|
||||
except struct.error:
|
||||
raise ELFInvalid("unable to parse identification")
|
||||
magic = bytes(ident[:4])
|
||||
if magic != b"\x7fELF":
|
||||
raise ELFInvalid(f"invalid magic: {magic!r}")
|
||||
|
||||
self.capacity = ident[4] # Format for program header (bitness).
|
||||
self.encoding = ident[5] # Data structure encoding (endianness).
|
||||
|
||||
try:
|
||||
# e_fmt: Format for program header.
|
||||
# p_fmt: Format for section header.
|
||||
# p_idx: Indexes to find p_type, p_offset, and p_filesz.
|
||||
e_fmt, self._p_fmt, self._p_idx = {
|
||||
(1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB.
|
||||
(1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB.
|
||||
(2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB.
|
||||
(2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB.
|
||||
}[(self.capacity, self.encoding)]
|
||||
except KeyError:
|
||||
raise ELFInvalid(
|
||||
f"unrecognized capacity ({self.capacity}) or "
|
||||
f"encoding ({self.encoding})"
|
||||
)
|
||||
|
||||
try:
|
||||
(
|
||||
_,
|
||||
self.machine, # Architecture type.
|
||||
_,
|
||||
_,
|
||||
self._e_phoff, # Offset of program header.
|
||||
_,
|
||||
self.flags, # Processor-specific flags.
|
||||
_,
|
||||
self._e_phentsize, # Size of section.
|
||||
self._e_phnum, # Number of sections.
|
||||
) = self._read(e_fmt)
|
||||
except struct.error as e:
|
||||
raise ELFInvalid("unable to parse machine and section information") from e
|
||||
|
||||
def _read(self, fmt: str) -> tuple[int, ...]:
|
||||
return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
|
||||
|
||||
@property
|
||||
def interpreter(self) -> str | None:
|
||||
"""
|
||||
The path recorded in the ``PT_INTERP`` section header.
|
||||
"""
|
||||
for index in range(self._e_phnum):
|
||||
self._f.seek(self._e_phoff + self._e_phentsize * index)
|
||||
try:
|
||||
data = self._read(self._p_fmt)
|
||||
except struct.error:
|
||||
continue
|
||||
if data[self._p_idx[0]] != 3: # Not PT_INTERP.
|
||||
continue
|
||||
self._f.seek(data[self._p_idx[1]])
|
||||
return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
|
||||
return None
|
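A short usage sketch of the vendored parser: it reads the `PT_INTERP` loader path that the musl detection below relies on. The import path assumes the module is importable as `packaging._elffile`; this is illustrative only.

```python
# Illustrative usage of the vendored ELF parser: read the PT_INTERP path of the
# running interpreter, which is how musl detection locates the dynamic loader.
import sys

from packaging._elffile import ELFFile, ELFInvalid

try:
    with open(sys.executable, "rb") as f:
        print(ELFFile(f).interpreter)  # e.g. /lib64/ld-linux-x86-64.so.2 on glibc
except (ELFInvalid, OSError):
    print("not a readable ELF executable")
```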
262
crates/uv-interpreter/python/packaging/_manylinux.py
Normal file
|
@ -0,0 +1,262 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Generator, Iterator, NamedTuple, Sequence
|
||||
|
||||
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
||||
|
||||
EF_ARM_ABIMASK = 0xFF000000
|
||||
EF_ARM_ABI_VER5 = 0x05000000
|
||||
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
||||
|
||||
|
||||
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
|
||||
# as the type for `path` until then.
|
||||
@contextlib.contextmanager
|
||||
def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
yield ELFFile(f)
|
||||
except (OSError, TypeError, ValueError):
|
||||
yield None
|
||||
|
||||
|
||||
def _is_linux_armhf(executable: str) -> bool:
|
||||
# hard-float ABI can be detected from the ELF header of the running
|
||||
# process
|
||||
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
|
||||
with _parse_elf(executable) as f:
|
||||
return (
|
||||
f is not None
|
||||
and f.capacity == EIClass.C32
|
||||
and f.encoding == EIData.Lsb
|
||||
and f.machine == EMachine.Arm
|
||||
and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
|
||||
and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
|
||||
)
|
||||
|
||||
|
||||
def _is_linux_i686(executable: str) -> bool:
|
||||
with _parse_elf(executable) as f:
|
||||
return (
|
||||
f is not None
|
||||
and f.capacity == EIClass.C32
|
||||
and f.encoding == EIData.Lsb
|
||||
and f.machine == EMachine.I386
|
||||
)
|
||||
|
||||
|
||||
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
|
||||
if "armv7l" in archs:
|
||||
return _is_linux_armhf(executable)
|
||||
if "i686" in archs:
|
||||
return _is_linux_i686(executable)
|
||||
allowed_archs = {
|
||||
"x86_64",
|
||||
"aarch64",
|
||||
"ppc64",
|
||||
"ppc64le",
|
||||
"s390x",
|
||||
"loongarch64",
|
||||
"riscv64",
|
||||
}
|
||||
return any(arch in allowed_archs for arch in archs)
|
||||
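As a quick illustration of the ABI check above, the interpreter binary itself is inspected; the import path `packaging._manylinux` is an assumption made for this sketch:

```python
# Illustrative ABI probes mirroring the armv7l / i686 branches above.
import sys

from packaging._manylinux import _is_linux_armhf, _is_linux_i686

print(_is_linux_armhf(sys.executable))  # True only for a 32-bit hard-float ARM build
print(_is_linux_i686(sys.executable))   # True only for a 32-bit x86 build
```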
|
||||
|
||||
# If glibc ever changes its major version, we need to know what the last
|
||||
# minor version was, so we can build the complete list of all versions.
|
||||
# For now, guess what the highest minor version might be, assume it will
|
||||
# be 50 for testing. Once this actually happens, update the dictionary
|
||||
# with the actual value.
|
||||
_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
|
||||
|
||||
|
||||
class _GLibCVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _glibc_version_string_confstr() -> str | None:
|
||||
"""
|
||||
Primary implementation of glibc_version_string using os.confstr.
|
||||
"""
|
||||
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
||||
# to be broken or missing. This strategy is used in the standard library
|
||||
# platform module.
|
||||
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
||||
try:
|
||||
# Should be a string like "glibc 2.17".
|
||||
version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
|
||||
assert version_string is not None
|
||||
_, version = version_string.rsplit()
|
||||
except (AssertionError, AttributeError, OSError, ValueError):
|
||||
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
||||
return None
|
||||
return version
|
||||
|
||||
|
||||
def _glibc_version_string_ctypes() -> str | None:
|
||||
"""
|
||||
Fallback implementation of glibc_version_string using ctypes.
|
||||
"""
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
#
|
||||
# We must also handle the special case where the executable is not a
|
||||
# dynamically linked executable. This can occur when using musl libc,
|
||||
# for example. In this situation, dlopen() will error, leading to an
|
||||
# OSError. Interestingly, at least in the case of musl, there is no
|
||||
# errno set on the OSError. The single string argument used to construct
|
||||
# OSError comes from libc itself and is therefore not portable to
|
||||
# hard code here. In any case, failure to call dlopen() means we
|
||||
# can proceed, so we bail on our attempt.
|
||||
try:
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str: str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
def _glibc_version_string() -> str | None:
|
||||
"""Returns glibc version string, or None if not using glibc."""
|
||||
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
|
||||
|
||||
|
||||
def _parse_glibc_version(version_str: str) -> tuple[int, int]:
|
||||
"""Parse glibc version.
|
||||
|
||||
We use a regexp instead of str.split because we want to discard any
|
||||
random junk that might come after the minor version -- this might happen
|
||||
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
"""
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn(
|
||||
f"Expected glibc version with 2 components major.minor,"
|
||||
f" got: {version_str}",
|
||||
RuntimeWarning,
|
||||
)
|
||||
return -1, -1
|
||||
return int(m.group("major")), int(m.group("minor"))
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_glibc_version() -> tuple[int, int]:
|
||||
version_str = _glibc_version_string()
|
||||
if version_str is None:
|
||||
return (-1, -1)
|
||||
return _parse_glibc_version(version_str)
|
||||
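The parsing is deliberately tolerant of vendor suffixes, as the docstring above notes. A small illustrative sketch (the import path `packaging._manylinux` is assumed):

```python
# Illustrative behaviour of the glibc version parsing shown above.
from packaging._manylinux import _parse_glibc_version

assert _parse_glibc_version("2.17") == (2, 17)
assert _parse_glibc_version("2.20-2014.11") == (2, 20)  # vendor suffix is ignored
assert _parse_glibc_version("unexpected") == (-1, -1)   # emits a RuntimeWarning
```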
|
||||
|
||||
# From PEP 513, PEP 600
|
||||
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
|
||||
sys_glibc = _get_glibc_version()
|
||||
if sys_glibc < version:
|
||||
return False
|
||||
# Check for presence of _manylinux module.
|
||||
try:
|
||||
import _manylinux
|
||||
except ImportError:
|
||||
return True
|
||||
if hasattr(_manylinux, "manylinux_compatible"):
|
||||
result = _manylinux.manylinux_compatible(version[0], version[1], arch)
|
||||
if result is not None:
|
||||
return bool(result)
|
||||
return True
|
||||
if version == _GLibCVersion(2, 5):
|
||||
if hasattr(_manylinux, "manylinux1_compatible"):
|
||||
return bool(_manylinux.manylinux1_compatible)
|
||||
if version == _GLibCVersion(2, 12):
|
||||
if hasattr(_manylinux, "manylinux2010_compatible"):
|
||||
return bool(_manylinux.manylinux2010_compatible)
|
||||
if version == _GLibCVersion(2, 17):
|
||||
if hasattr(_manylinux, "manylinux2014_compatible"):
|
||||
return bool(_manylinux.manylinux2014_compatible)
|
||||
return True
|
||||
|
||||
|
||||
_LEGACY_MANYLINUX_MAP = {
|
||||
# CentOS 7 w/ glibc 2.17 (PEP 599)
|
||||
(2, 17): "manylinux2014",
|
||||
# CentOS 6 w/ glibc 2.12 (PEP 571)
|
||||
(2, 12): "manylinux2010",
|
||||
# CentOS 5 w/ glibc 2.5 (PEP 513)
|
||||
(2, 5): "manylinux1",
|
||||
}
|
||||
|
||||
|
||||
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
|
||||
"""Generate manylinux tags compatible to the current platform.
|
||||
|
||||
:param archs: Sequence of compatible architectures.
|
||||
The first one shall be the closest to the actual architecture and be the part of
|
||||
platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
|
||||
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
|
||||
be manylinux-compatible.
|
||||
|
||||
:returns: An iterator of compatible manylinux tags.
|
||||
"""
|
||||
if not _have_compatible_abi(sys.executable, archs):
|
||||
return
|
||||
# Oldest glibc to be supported regardless of architecture is (2, 17).
|
||||
too_old_glibc2 = _GLibCVersion(2, 16)
|
||||
if set(archs) & {"x86_64", "i686"}:
|
||||
# On x86/i686 also oldest glibc to be supported is (2, 5).
|
||||
too_old_glibc2 = _GLibCVersion(2, 4)
|
||||
current_glibc = _GLibCVersion(*_get_glibc_version())
|
||||
glibc_max_list = [current_glibc]
|
||||
# We can assume compatibility across glibc major versions.
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=24636
|
||||
#
|
||||
# Build a list of maximum glibc versions so that we can
|
||||
# output the canonical list of all glibc from current_glibc
|
||||
# down to too_old_glibc2, including all intermediary versions.
|
||||
for glibc_major in range(current_glibc.major - 1, 1, -1):
|
||||
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
|
||||
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
|
||||
for arch in archs:
|
||||
for glibc_max in glibc_max_list:
|
||||
if glibc_max.major == too_old_glibc2.major:
|
||||
min_minor = too_old_glibc2.minor
|
||||
else:
|
||||
# For other glibc major versions oldest supported is (x, 0).
|
||||
min_minor = -1
|
||||
for glibc_minor in range(glibc_max.minor, min_minor, -1):
|
||||
glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
|
||||
tag = "manylinux_{}_{}".format(*glibc_version)
|
||||
if _is_compatible(arch, glibc_version):
|
||||
yield f"{tag}_{arch}"
|
||||
# Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
|
||||
if glibc_version in _LEGACY_MANYLINUX_MAP:
|
||||
legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
|
||||
if _is_compatible(arch, glibc_version):
|
||||
yield f"{legacy_tag}_{arch}"
|
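A usage sketch of the generator above; the exact output depends on the running system, and the values shown assume a glibc 2.31 `x86_64` machine (the import path `packaging._manylinux` is also an assumption):

```python
# Illustrative use of the manylinux tag generator above.
from packaging._manylinux import platform_tags

tags = list(platform_tags(["x86_64"]))
# On a glibc 2.31 machine this yields, in order:
#   manylinux_2_31_x86_64, manylinux_2_30_x86_64, ..., manylinux_2_17_x86_64,
#   manylinux2014_x86_64, ..., manylinux_2_12_x86_64, manylinux2010_x86_64, ...,
#   manylinux_2_5_x86_64, manylinux1_x86_64
```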
85
crates/uv-interpreter/python/packaging/_musllinux.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
"""PEP 656 support.
|
||||
|
||||
This module implements logic to detect if the currently running Python is
|
||||
linked against musl, and what musl version is used.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Iterator, NamedTuple, Sequence
|
||||
|
||||
from ._elffile import ELFFile
|
||||
|
||||
|
||||
class _MuslVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _parse_musl_version(output: str) -> _MuslVersion | None:
|
||||
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
||||
if len(lines) < 2 or lines[0][:4] != "musl":
|
||||
return None
|
||||
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
||||
if not m:
|
||||
return None
|
||||
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_musl_version(executable: str) -> _MuslVersion | None:
|
||||
"""Detect currently-running musl runtime version.
|
||||
|
||||
This is done by checking the specified executable's dynamic linking
|
||||
information, and invoking the loader to parse its output for a version
|
||||
string. If the loader is musl, the output would be something like::
|
||||
|
||||
musl libc (x86_64)
|
||||
Version 1.2.2
|
||||
Dynamic Program Loader
|
||||
"""
|
||||
try:
|
||||
with open(executable, "rb") as f:
|
||||
ld = ELFFile(f).interpreter
|
||||
except (OSError, TypeError, ValueError):
|
||||
return None
|
||||
if ld is None or "musl" not in ld:
|
||||
return None
|
||||
proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
|
||||
return _parse_musl_version(proc.stderr)
|
||||
|
||||
|
||||
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
|
||||
"""Generate musllinux tags compatible to the current platform.
|
||||
|
||||
:param archs: Sequence of compatible architectures.
|
||||
The first one shall be the closest to the actual architecture and be the part of
|
||||
platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
|
||||
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
|
||||
be musllinux-compatible.
|
||||
|
||||
:returns: An iterator of compatible musllinux tags.
|
||||
"""
|
||||
sys_musl = _get_musl_version(sys.executable)
|
||||
if sys_musl is None: # Python not dynamically linked against musl.
|
||||
return
|
||||
for arch in archs:
|
||||
for minor in range(sys_musl.minor, -1, -1):
|
||||
yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
|
||||
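For illustration, the loader output quoted in the docstring above parses to musl 1.2, which then expands to a descending series of musllinux tags (the import path `packaging._musllinux` is an assumption of this sketch):

```python
# Illustrative behaviour of the musl detection above.
from packaging._musllinux import _parse_musl_version

output = "musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader"
assert _parse_musl_version(output) == (1, 2)
# On such a system, platform_tags(["x86_64"]) yields:
#   musllinux_1_2_x86_64, musllinux_1_1_x86_64, musllinux_1_0_x86_64
```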
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
import sysconfig
|
||||
|
||||
plat = sysconfig.get_platform()
|
||||
assert plat.startswith("linux-"), "not linux"
|
||||
|
||||
print("plat:", plat)
|
||||
print("musl:", _get_musl_version(sys.executable))
|
||||
print("tags:", end=" ")
|
||||
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
||||
print(t, end="\n ")
|
|
@ -5,10 +5,10 @@ use std::path::PathBuf;
|
|||
|
||||
use tracing::{debug, instrument};
|
||||
|
||||
use platform_host::Platform;
|
||||
use uv_cache::Cache;
|
||||
use uv_fs::normalize_path;
|
||||
|
||||
use crate::interpreter::InterpreterInfoError;
|
||||
use crate::python_environment::{detect_python_executable, detect_virtual_env};
|
||||
use crate::{Error, Interpreter, PythonVersion};
|
||||
|
||||
|
@ -25,11 +25,7 @@ use crate::{Error, Interpreter, PythonVersion};
|
|||
/// patch version (e.g. `python3.12.1`) is often not in `PATH` and we make the simplifying
|
||||
/// assumption that the user has only this one patch version installed.
|
||||
#[instrument(skip_all, fields(%request))]
|
||||
pub fn find_requested_python(
|
||||
request: &str,
|
||||
platform: &Platform,
|
||||
cache: &Cache,
|
||||
) -> Result<Option<Interpreter>, Error> {
|
||||
pub fn find_requested_python(request: &str, cache: &Cache) -> Result<Option<Interpreter>, Error> {
|
||||
debug!("Starting interpreter discovery for Python @ `{request}`");
|
||||
let versions = request
|
||||
.splitn(3, '.')
|
||||
|
@ -46,18 +42,18 @@ pub fn find_requested_python(
|
|||
// SAFETY: Guaranteed by the Ok(versions) guard
|
||||
_ => unreachable!(),
|
||||
};
|
||||
find_python(selector, platform, cache)
|
||||
find_python(selector, cache)
|
||||
} else if !request.contains(std::path::MAIN_SEPARATOR) {
|
||||
// `-p python3.10`; Generally not used on windows because all Python are `python.exe`.
|
||||
let Some(executable) = find_executable(request)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
Interpreter::query(executable, platform.clone(), cache).map(Some)
|
||||
Interpreter::query(executable, cache).map(Some)
|
||||
} else {
|
||||
// `-p /home/ferris/.local/bin/python3.10`
|
||||
let executable = normalize_path(request);
|
||||
|
||||
Interpreter::query(executable, platform.clone(), cache).map(Some)
|
||||
Interpreter::query(executable, cache).map(Some)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -66,9 +62,9 @@ pub fn find_requested_python(
|
|||
/// We prefer the test overwrite `UV_TEST_PYTHON_PATH` if it is set, otherwise `python3`/`python` or
|
||||
/// `python.exe` respectively.
|
||||
#[instrument(skip_all)]
|
||||
pub fn find_default_python(platform: &Platform, cache: &Cache) -> Result<Interpreter, Error> {
|
||||
pub fn find_default_python(cache: &Cache) -> Result<Interpreter, Error> {
|
||||
debug!("Starting interpreter discovery for default Python");
|
||||
try_find_default_python(platform, cache)?.ok_or(if cfg!(windows) {
|
||||
try_find_default_python(cache)?.ok_or(if cfg!(windows) {
|
||||
Error::NoPythonInstalledWindows
|
||||
} else if cfg!(unix) {
|
||||
Error::NoPythonInstalledUnix
|
||||
|
@ -78,11 +74,8 @@ pub fn find_default_python(platform: &Platform, cache: &Cache) -> Result<Interpr
|
|||
}
|
||||
|
||||
/// Same as [`find_default_python`] but returns `None` if no python is found instead of returning an `Err`.
|
||||
pub(crate) fn try_find_default_python(
|
||||
platform: &Platform,
|
||||
cache: &Cache,
|
||||
) -> Result<Option<Interpreter>, Error> {
|
||||
find_python(PythonVersionSelector::Default, platform, cache)
|
||||
pub(crate) fn try_find_default_python(cache: &Cache) -> Result<Option<Interpreter>, Error> {
|
||||
find_python(PythonVersionSelector::Default, cache)
|
||||
}
|
||||
|
||||
/// Find a Python version matching `selector`.
|
||||
|
@ -100,7 +93,6 @@ pub(crate) fn try_find_default_python(
|
|||
/// (Windows): Filter out the Windows store shim (Enabled in Settings/Apps/Advanced app settings/App execution aliases).
|
||||
fn find_python(
|
||||
selector: PythonVersionSelector,
|
||||
platform: &Platform,
|
||||
cache: &Cache,
|
||||
) -> Result<Option<Interpreter>, Error> {
|
||||
#[allow(non_snake_case)]
|
||||
|
@ -126,11 +118,16 @@ fn find_python(
|
|||
continue;
|
||||
}
|
||||
|
||||
let interpreter = match Interpreter::query(&path, platform.clone(), cache) {
|
||||
let interpreter = match Interpreter::query(&path, cache) {
|
||||
Ok(interpreter) => interpreter,
|
||||
Err(Error::Python2OrOlder) => {
|
||||
Err(
|
||||
err @ Error::QueryScript {
|
||||
err: InterpreterInfoError::UnsupportedPythonVersion,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
if selector.major() <= Some(2) {
|
||||
return Err(Error::Python2OrOlder);
|
||||
return Err(err);
|
||||
}
|
||||
// Skip over Python 2 or older installation when querying for a recent python installation.
|
||||
debug!("Found a Python 2 installation that isn't supported by uv, skipping.");
|
||||
|
@ -141,7 +138,7 @@ fn find_python(
|
|||
|
||||
let installation = PythonInstallation::Interpreter(interpreter);
|
||||
|
||||
if let Some(interpreter) = installation.select(selector, platform, cache)? {
|
||||
if let Some(interpreter) = installation.select(selector, cache)? {
|
||||
return Ok(Some(interpreter));
|
||||
}
|
||||
}
|
||||
|
@ -154,7 +151,7 @@ fn find_python(
|
|||
if cfg!(windows) {
|
||||
if let Ok(shims) = which::which_in_global("python.bat", Some(&path)) {
|
||||
for shim in shims {
|
||||
let interpreter = match Interpreter::query(&shim, platform.clone(), cache) {
|
||||
let interpreter = match Interpreter::query(&shim, cache) {
|
||||
Ok(interpreter) => interpreter,
|
||||
Err(error) => {
|
||||
// Don't fail when querying the shim fails. E.g., it's possible that no Python version is selected
|
||||
|
@ -164,8 +161,8 @@ fn find_python(
|
|||
}
|
||||
};
|
||||
|
||||
if let Some(interpreter) = PythonInstallation::Interpreter(interpreter)
|
||||
.select(selector, platform, cache)?
|
||||
if let Some(interpreter) =
|
||||
PythonInstallation::Interpreter(interpreter).select(selector, cache)?
|
||||
{
|
||||
return Ok(Some(interpreter));
|
||||
}
|
||||
|
@ -180,7 +177,7 @@ fn find_python(
|
|||
Ok(paths) => {
|
||||
for entry in paths {
|
||||
let installation = PythonInstallation::PyListPath(entry);
|
||||
if let Some(interpreter) = installation.select(selector, platform, cache)? {
|
||||
if let Some(interpreter) = installation.select(selector, cache)? {
return Ok(Some(interpreter));
}
}

@@ -299,7 +296,6 @@ impl PythonInstallation {
fn select(
self,
selector: PythonVersionSelector,
platform: &Platform,
cache: &Cache,
) -> Result<Option<Interpreter>, Error> {
let selected = match selector {

@@ -312,7 +308,7 @@ impl PythonInstallation {
}

PythonVersionSelector::MajorMinorPatch(major, minor, requested_patch) => {
let interpreter = self.into_interpreter(platform, cache)?;
let interpreter = self.into_interpreter(cache)?;
return Ok(
if major == interpreter.python_major()
&& minor == interpreter.python_minor()

@@ -327,21 +323,17 @@ impl PythonInstallation {
};

if selected {
self.into_interpreter(platform, cache).map(Some)
self.into_interpreter(cache).map(Some)
} else {
Ok(None)
}
}

pub(super) fn into_interpreter(
self,
platform: &Platform,
cache: &Cache,
) -> Result<Interpreter, Error> {
pub(super) fn into_interpreter(self, cache: &Cache) -> Result<Interpreter, Error> {
match self {
Self::PyListPath(PyListPath {
executable_path, ..
}) => Interpreter::query(executable_path, platform.clone(), cache),
}) => Interpreter::query(executable_path, cache),
Self::Interpreter(interpreter) => Ok(interpreter),
}
}

@@ -415,7 +407,6 @@ impl PythonVersionSelector {
#[instrument(skip_all, fields(?python_version))]
pub fn find_best_python(
python_version: Option<&PythonVersion>,
platform: &Platform,
cache: &Cache,
) -> Result<Interpreter, Error> {
if let Some(python_version) = python_version {

@@ -428,7 +419,7 @@ pub fn find_best_python(
}

// First, check for an exact match (or the first available version if no Python version was provided)
if let Some(interpreter) = find_version(python_version, platform, cache)? {
if let Some(interpreter) = find_version(python_version, cache)? {
return Ok(interpreter);
}

@@ -436,16 +427,14 @@ pub fn find_best_python(
// If that fails, and a specific patch version was requested try again allowing a
// different patch version
if python_version.patch().is_some() {
if let Some(interpreter) =
find_version(Some(&python_version.without_patch()), platform, cache)?
{
if let Some(interpreter) = find_version(Some(&python_version.without_patch()), cache)? {
return Ok(interpreter);
}
}
}

// If a Python version was requested but cannot be fulfilled, just take any version
if let Some(interpreter) = find_version(None, platform, cache)? {
if let Some(interpreter) = find_version(None, cache)? {
return Ok(interpreter);
}

@@ -470,7 +459,6 @@ pub fn find_best_python(
/// we will return [`None`].
fn find_version(
python_version: Option<&PythonVersion>,
platform: &Platform,
cache: &Cache,
) -> Result<Option<Interpreter>, Error> {
let version_matches = |interpreter: &Interpreter| -> bool {

@@ -486,7 +474,7 @@ fn find_version(
// Check if the venv Python matches.
if let Some(venv) = detect_virtual_env()? {
let executable = detect_python_executable(venv);
let interpreter = Interpreter::query(executable, platform.clone(), cache)?;
let interpreter = Interpreter::query(executable, cache)?;

if version_matches(&interpreter) {
return Ok(Some(interpreter));

@@ -496,9 +484,9 @@ fn find_version(
// Look for the requested version with by search for `python{major}.{minor}` in `PATH` on
// Unix and `py --list-paths` on Windows.
let interpreter = if let Some(python_version) = python_version {
find_requested_python(&python_version.string, platform, cache)?
find_requested_python(&python_version.string, cache)?
} else {
try_find_default_python(platform, cache)?
try_find_default_python(cache)?
};

if let Some(interpreter) = interpreter {

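The hunks above drop the `platform` argument from the whole lookup chain; platform information now comes back from the interpreter probe itself. A minimal sketch of the resulting call shape, using the signatures from this hunk (the `anyhow` wrapper and the temporary cache are illustrative, not part of the commit):

```rust
use uv_cache::Cache;
use uv_interpreter::{find_best_python, PythonVersion};

fn pick(requested: Option<&PythonVersion>) -> anyhow::Result<()> {
    let cache = Cache::temp()?;
    // Selection order, per the comments above:
    // 1. an exact match (or the first available interpreter if no version was requested),
    // 2. the same major.minor with a different patch,
    // 3. any Python found on `PATH` (or via `py --list-paths` on Windows).
    let interpreter = find_best_python(requested, &cache)?;
    println!("using Python {}", interpreter.python_version());
    Ok(())
}
```
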
@@ -706,14 +694,12 @@ mod windows {
}

#[cfg(test)]
#[cfg(windows)]
mod tests {
use std::fmt::Debug;

use insta::assert_snapshot;
use itertools::Itertools;

use platform_host::Platform;
use uv_cache::Cache;

use crate::{find_requested_python, Error};

@@ -725,12 +711,10 @@ mod windows {
}

#[test]
#[cfg_attr(not(windows), ignore)]
fn no_such_python_path() {
let result = find_requested_python(
r"C:\does\not\exists\python3.12",
&Platform::current().unwrap(),
&Cache::temp().unwrap(),
);
let result =
find_requested_python(r"C:\does\not\exists\python3.12", &Cache::temp().unwrap());
insta::with_settings!({
filters => vec![
// The exact message is host language dependent

@@ -747,13 +731,11 @@ mod windows {
}
}

#[cfg(unix)]
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use itertools::Itertools;

use platform_host::Platform;
use uv_cache::Cache;

use crate::find_python::find_requested_python;

@@ -766,44 +748,35 @@ mod tests {
}

#[test]
#[cfg_attr(not(unix), ignore)]
fn no_such_python_version() {
let request = "3.1000";
let result = find_requested_python(
request,
&Platform::current().unwrap(),
&Cache::temp().unwrap(),
)
.unwrap()
.ok_or(Error::NoSuchPython(request.to_string()));
let result = find_requested_python(request, &Cache::temp().unwrap())
.unwrap()
.ok_or(Error::NoSuchPython(request.to_string()));
assert_snapshot!(
format_err(result),
@"No Python 3.1000 In `PATH`. Is Python 3.1000 installed?"
@"No Python 3.1000 in `PATH`. Is Python 3.1000 installed?"
);
}

#[test]
#[cfg_attr(not(unix), ignore)]
fn no_such_python_binary() {
let request = "python3.1000";
let result = find_requested_python(
request,
&Platform::current().unwrap(),
&Cache::temp().unwrap(),
)
.unwrap()
.ok_or(Error::NoSuchPython(request.to_string()));
let result = find_requested_python(request, &Cache::temp().unwrap())
.unwrap()
.ok_or(Error::NoSuchPython(request.to_string()));
assert_snapshot!(
format_err(result),
@"No Python python3.1000 In `PATH`. Is Python python3.1000 installed?"
@"No Python python3.1000 in `PATH`. Is Python python3.1000 installed?"
);
}

#[test]
#[cfg_attr(not(unix), ignore)]
fn no_such_python_path() {
let result = find_requested_python(
"/does/not/exists/python3.12",
&Platform::current().unwrap(),
&Cache::temp().unwrap(),
);
let result = find_requested_python("/does/not/exists/python3.12", &Cache::temp().unwrap());
assert_snapshot!(
format_err(result), @r###"
failed to canonicalize path `/does/not/exists/python3.12`

@@ -1,4 +1,3 @@
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::Command;

@@ -12,7 +11,7 @@ use cache_key::digest;
use install_wheel_rs::Layout;
use pep440_rs::Version;
use pep508_rs::{MarkerEnvironment, StringVersion};
use platform_host::Platform;
use platform_tags::Platform;
use platform_tags::{Tags, TagsError};
use pypi_types::Scheme;
use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness, Timestamp};

@@ -39,11 +38,7 @@ pub struct Interpreter {

impl Interpreter {
/// Detect the interpreter info for the given Python executable.
pub fn query(
executable: impl AsRef<Path>,
platform: Platform,
cache: &Cache,
) -> Result<Self, Error> {
pub fn query(executable: impl AsRef<Path>, cache: &Cache) -> Result<Self, Error> {
let info = InterpreterInfo::query_cached(executable.as_ref(), cache)?;

debug_assert!(

@@ -53,7 +48,7 @@ impl Interpreter {
);

Ok(Self {
platform,
platform: info.platform,
markers: Box::new(info.markers),
scheme: info.scheme,
virtualenv: info.virtualenv,

@@ -336,8 +331,27 @@ impl ExternallyManaged {
}
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "result", rename_all = "lowercase")]
enum InterpreterInfoResult {
Error(InterpreterInfoError),
Success(Box<InterpreterInfo>),
}

#[derive(Debug, Error, Deserialize, Serialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum InterpreterInfoError {
#[error("Could not detect a glibc or a musl libc (while running on Linux)")]
LibcNotFound,
#[error("Unknown operation system: `{operating_system}`")]
UnknownOperatingSystem { operating_system: String },
#[error("Python 2 is not supported. Please use Python 3.8 or newer.")]
UnsupportedPythonVersion,
}

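The two `#[serde(tag = ...)]` attributes above are what let the single JSON document emitted by the probing script carry either outcome. A stripped-down sketch of that decoding (the `ProbeResult`/`ProbeError` names and the sample payload are illustrative stand-ins, not the types in this diff):

```rust
use serde::Deserialize;

// Simplified model of the `"result": "success|error"` protocol.
#[derive(Debug, Deserialize)]
#[serde(tag = "result", rename_all = "lowercase")]
enum ProbeResult {
    Error(ProbeError),
    Success { markers: serde_json::Value },
}

#[derive(Debug, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
enum ProbeError {
    LibcNotFound,
    UnknownOperatingSystem { operating_system: String },
    UnsupportedPythonVersion,
}

fn main() {
    // Assumed shape of an error report from the Python side.
    let payload =
        r#"{"result": "error", "kind": "unknown_operating_system", "operating_system": "linux"}"#;
    let parsed: ProbeResult = serde_json::from_str(payload).unwrap();
    // Prints: Error(UnknownOperatingSystem { operating_system: "linux" })
    println!("{parsed:?}");
}
```
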
#[derive(Debug, Deserialize, Serialize, Clone)]
struct InterpreterInfo {
platform: Platform,
markers: MarkerEnvironment,
scheme: Scheme,
virtualenv: Scheme,

@@ -351,59 +365,23 @@ struct InterpreterInfo {

impl InterpreterInfo {
/// Return the resolved [`InterpreterInfo`] for the given Python executable.
pub(crate) fn query(interpreter: &Path) -> Result<Self, Error> {
let script = include_str!("get_interpreter_info.py");
let output = if cfg!(windows)
&& interpreter
.extension()
.is_some_and(|extension| extension == "bat")
{
// Multiline arguments aren't well-supported in batch files and `pyenv-win`, for example, trips over it.
// We work around this batch limitation by passing the script via stdin instead.
// This is somewhat more expensive because we have to spawn a new thread to write the
// stdin to avoid deadlocks in case the child process waits for the parent to read stdout.
// The performance overhead is the reason why we only applies this to batch files.
// https://github.com/pyenv-win/pyenv-win/issues/589
let mut child = Command::new(interpreter)
.arg("-")
.stdin(std::process::Stdio::piped())
.stdout(std::process::Stdio::piped())
.spawn()
.map_err(|err| Error::PythonSubcommandLaunch {
interpreter: interpreter.to_path_buf(),
err,
})?;
pub(crate) fn query(interpreter: &Path, cache: &Cache) -> Result<Self, Error> {
let tempdir = tempfile::tempdir_in(cache.root())?;
Self::setup_python_query_files(tempdir.path())?;

let mut stdin = child.stdin.take().unwrap();

// From the Rust documentation:
// If the child process fills its stdout buffer, it may end up
// waiting until the parent reads the stdout, and not be able to
// read stdin in the meantime, causing a deadlock.
// Writing from another thread ensures that stdout is being read
// at the same time, avoiding the problem.
std::thread::spawn(move || {
stdin
.write_all(script.as_bytes())
.expect("failed to write to stdin");
});

child.wait_with_output()
} else {
Command::new(interpreter).arg("-c").arg(script).output()
}
.map_err(|err| Error::PythonSubcommandLaunch {
interpreter: interpreter.to_path_buf(),
err,
})?;
let output = Command::new(interpreter)
.arg("-m")
.arg("python.get_interpreter_info")
.current_dir(tempdir.path().simplified())
.output()
.map_err(|err| Error::PythonSubcommandLaunch {
interpreter: interpreter.to_path_buf(),
err,
})?;

// stderr isn't technically a criterion for success, but i don't know of any cases where there
// should be stderr output and if there is, we want to know
if !output.status.success() || !output.stderr.is_empty() {
if output.status.code() == Some(3) {
return Err(Error::Python2OrOlder);
}

return Err(Error::PythonSubcommandOutput {
message: format!(
"Querying Python at `{}` failed with status {}",

@@ -416,19 +394,60 @@ impl InterpreterInfo {
});
}

let data: Self = serde_json::from_slice(&output.stdout).map_err(|err| {
Error::PythonSubcommandOutput {
message: format!(
"Querying Python at `{}` did not return the expected data: {err}",
interpreter.display(),
),
exit_code: output.status,
stdout: String::from_utf8_lossy(&output.stdout).trim().to_string(),
stderr: String::from_utf8_lossy(&output.stderr).trim().to_string(),
}
})?;
let result: InterpreterInfoResult =
serde_json::from_slice(&output.stdout).map_err(|err| {
Error::PythonSubcommandOutput {
message: format!(
"Querying Python at `{}` did not return the expected data: {err}",
interpreter.display(),
),
exit_code: output.status,
stdout: String::from_utf8_lossy(&output.stdout).trim().to_string(),
stderr: String::from_utf8_lossy(&output.stderr).trim().to_string(),
}
})?;

Ok(data)
match result {
InterpreterInfoResult::Error(err) => Err(Error::QueryScript {
err,
interpreter: interpreter.to_path_buf(),
}),
InterpreterInfoResult::Success(data) => Ok(*data),
}
}

/// Duplicate the directory structure we have in `../python` into a tempdir, so we can run
/// the Python probing scripts with `python -m python.get_interpreter_info` from that tempdir.
fn setup_python_query_files(root: &Path) -> Result<(), Error> {
let python_dir = root.join("python");
fs_err::create_dir(&python_dir)?;
fs_err::write(
python_dir.join("get_interpreter_info.py"),
include_str!("../python/get_interpreter_info.py"),
)?;
fs_err::write(
python_dir.join("__init__.py"),
include_str!("../python/__init__.py"),
)?;
let packaging_dir = python_dir.join("packaging");
fs_err::create_dir(&packaging_dir)?;
fs_err::write(
packaging_dir.join("__init__.py"),
include_str!("../python/packaging/__init__.py"),
)?;
fs_err::write(
packaging_dir.join("_elffile.py"),
include_str!("../python/packaging/_elffile.py"),
)?;
fs_err::write(
packaging_dir.join("_manylinux.py"),
include_str!("../python/packaging/_manylinux.py"),
)?;
fs_err::write(
packaging_dir.join("_musllinux.py"),
include_str!("../python/packaging/_musllinux.py"),
)?;
Ok(())
}

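For orientation, the layout that `setup_python_query_files` writes into the tempdir, and the command then run against it (reconstructed from the `fs_err::write` calls and the `Command` invocation above), is roughly:

```
<tempdir>/
└── python/
    ├── __init__.py
    ├── get_interpreter_info.py
    └── packaging/
        ├── __init__.py
        ├── _elffile.py
        ├── _manylinux.py
        └── _musllinux.py

# executed with <tempdir> as the working directory:
<interpreter> -m python.get_interpreter_info
```
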
/// A wrapper around [`markers::query_interpreter_info`] to cache the computed markers.

@@ -482,7 +501,7 @@ impl InterpreterInfo {

// Otherwise, run the Python script.
debug!("Probing interpreter info for: {}", executable.display());
let info = Self::query(executable)?;
let info = Self::query(executable, cache)?;
debug!(
"Found Python {} for: {}",
info.markers.python_full_version,

@@ -516,7 +535,6 @@ mod tests {
use tempfile::tempdir;

use pep440_rs::Version;
use platform_host::Platform;
use uv_cache::Cache;

use crate::Interpreter;

@@ -527,6 +545,15 @@ mod tests {
let mocked_interpreter = mock_dir.path().join("python");
let json = indoc! {r##"
{
"result": "success",
"platform": {
"os": {
"name": "manylinux",
"major": 2,
"minor": 38
},
"arch": "x86_64"
},
"markers": {
"implementation_name": "cpython",
"implementation_version": "3.12.0",

@@ -563,7 +590,6 @@ mod tests {
"##};

let cache = Cache::temp().unwrap();
let platform = Platform::current().unwrap();

fs::write(
&mocked_interpreter,

@@ -578,8 +604,7 @@ mod tests {
std::os::unix::fs::PermissionsExt::from_mode(0o770),
)
.unwrap();
let interpreter =
Interpreter::query(&mocked_interpreter, platform.clone(), &cache).unwrap();
let interpreter = Interpreter::query(&mocked_interpreter, &cache).unwrap();
assert_eq!(
interpreter.markers.python_version.version,
Version::from_str("3.12").unwrap()

@@ -592,8 +617,7 @@ mod tests {
"##, json.replace("3.12", "3.13")},
)
.unwrap();
let interpreter =
Interpreter::query(&mocked_interpreter, platform.clone(), &cache).unwrap();
let interpreter = Interpreter::query(&mocked_interpreter, &cache).unwrap();
assert_eq!(
interpreter.markers.python_version.version,
Version::from_str("3.13").unwrap()

@@ -17,6 +17,7 @@ use thiserror::Error;
pub use crate::cfg::PyVenvConfiguration;
pub use crate::find_python::{find_best_python, find_default_python, find_requested_python};
pub use crate::interpreter::Interpreter;
use crate::interpreter::InterpreterInfoError;
pub use crate::python_environment::PythonEnvironment;
pub use crate::python_version::PythonVersion;
pub use crate::virtualenv::Virtualenv;

@@ -38,11 +39,11 @@ pub enum Error {
PythonNotFound,
#[error("Failed to locate a virtualenv or Conda environment (checked: `VIRTUAL_ENV`, `CONDA_PREFIX`, and `.venv`). Run `uv venv` to create a virtualenv.")]
VenvNotFound,
#[error("Failed to locate Python interpreter at: `{0}`")]
#[error("Failed to locate Python interpreter at `{0}`")]
RequestedPythonNotFound(String),
#[error(transparent)]
Io(#[from] io::Error),
#[error("Failed to query python interpreter `{interpreter}`")]
#[error("Failed to query Python interpreter at `{interpreter}`")]
PythonSubcommandLaunch {
interpreter: PathBuf,
#[source]

@@ -56,7 +57,7 @@ pub enum Error {
)]
NoSuchPython(String),
#[cfg(unix)]
#[error("No Python {0} In `PATH`. Is Python {0} installed?")]
#[error("No Python {0} in `PATH`. Is Python {0} installed?")]
NoSuchPython(String),
#[error("Neither `python` nor `python3` are in `PATH`. Is Python installed?")]
NoPythonInstalledUnix,

@@ -71,12 +72,16 @@ pub enum Error {
stdout: String,
stderr: String,
},
#[error("Python 2 or older is not supported. Please use Python 3 or newer.")]
Python2OrOlder,
#[error("Failed to write to cache")]
Encode(#[from] rmp_serde::encode::Error),
#[error("Broken virtualenv: Failed to parse pyvenv.cfg")]
Cfg(#[from] cfg::Error),
#[error("Error finding `{}` in PATH", _0.to_string_lossy())]
WhichError(OsString, #[source] which::Error),
#[error("Can't use Python at `{interpreter}`")]
QueryScript {
#[source]
err: InterpreterInfoError,
interpreter: PathBuf,
},
}

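Because the new `QueryScript` variant keeps the Python-reported `InterpreterInfoError` as its `#[source]`, callers see a two-level error chain rather than a flattened string. A small sketch of walking that chain (illustrative only; the CLI formats it through its own error reporter):

```rust
use std::path::Path;

use uv_cache::Cache;
use uv_interpreter::Interpreter;

fn probe(path: &Path, cache: &Cache) -> Result<(), uv_interpreter::Error> {
    match Interpreter::query(path, cache) {
        Ok(interpreter) => {
            println!("found Python {}", interpreter.python_version());
            Ok(())
        }
        Err(err) => {
            // Top level, e.g. "Can't use Python at `<path>`"
            eprintln!("{err}");
            // Underlying detail reported by the probing script, if any.
            if let Some(cause) = std::error::Error::source(&err) {
                eprintln!("  caused by: {cause}");
            }
            Err(err)
        }
    }
}
```
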
@@ -3,7 +3,6 @@ use std::path::{Path, PathBuf};

use tracing::debug;

use platform_host::Platform;
use uv_cache::Cache;
use uv_fs::{LockedFile, Simplified};

@@ -19,13 +18,13 @@ pub struct PythonEnvironment {

impl PythonEnvironment {
/// Create a [`PythonEnvironment`] for an existing virtual environment.
pub fn from_virtualenv(platform: Platform, cache: &Cache) -> Result<Self, Error> {
pub fn from_virtualenv(cache: &Cache) -> Result<Self, Error> {
let Some(venv) = detect_virtual_env()? else {
return Err(Error::VenvNotFound);
};
let venv = fs_err::canonicalize(venv)?;
let executable = detect_python_executable(&venv);
let interpreter = Interpreter::query(&executable, platform, cache)?;
let interpreter = Interpreter::query(&executable, cache)?;

debug_assert!(
interpreter.base_prefix() == interpreter.base_exec_prefix(),

@@ -41,12 +40,8 @@ impl PythonEnvironment {
}

/// Create a [`PythonEnvironment`] for a Python interpreter specifier (e.g., a path or a binary name).
pub fn from_requested_python(
python: &str,
platform: &Platform,
cache: &Cache,
) -> Result<Self, Error> {
let Some(interpreter) = find_requested_python(python, platform, cache)? else {
pub fn from_requested_python(python: &str, cache: &Cache) -> Result<Self, Error> {
let Some(interpreter) = find_requested_python(python, cache)? else {
return Err(Error::RequestedPythonNotFound(python.to_string()));
};
Ok(Self {

@@ -56,8 +51,8 @@ impl PythonEnvironment {
}

/// Create a [`PythonEnvironment`] for the default Python interpreter.
pub fn from_default_python(platform: &Platform, cache: &Cache) -> Result<Self, Error> {
let interpreter = find_default_python(platform, cache)?;
pub fn from_default_python(cache: &Cache) -> Result<Self, Error> {
let interpreter = find_default_python(cache)?;
Ok(Self {
root: interpreter.prefix().to_path_buf(),
interpreter,

@@ -20,7 +20,6 @@ install-wheel-rs = { path = "../install-wheel-rs" }
once-map = { path = "../once-map" }
pep440_rs = { path = "../pep440-rs", features = ["pubgrub"] }
pep508_rs = { path = "../pep508-rs" }
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
pypi-types = { path = "../pypi-types" }
uv-cache = { path = "../uv-cache" }

@@ -55,7 +54,7 @@ sha2 = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros"] }
tokio-stream = { workspace = true }
tokio-stream = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] }
tracing = { workspace = true }
url = { workspace = true }

@@ -12,11 +12,10 @@ use once_cell::sync::Lazy;

use distribution_types::{IndexLocations, Resolution, SourceDist};
use pep508_rs::{MarkerEnvironment, Requirement, StringVersion};
use platform_host::{Arch, Os, Platform};
use platform_tags::Tags;
use platform_tags::{Arch, Os, Platform, Tags};
use uv_cache::Cache;
use uv_client::{FlatIndex, RegistryClientBuilder};
use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_interpreter::{find_default_python, Interpreter, PythonEnvironment};
use uv_resolver::{
DisplayResolutionGraph, InMemoryIndex, Manifest, Options, OptionsBuilder, PreReleaseMode,
ResolutionGraph, ResolutionMode, Resolver,

@@ -120,7 +119,10 @@ async fn resolve(
let client = RegistryClientBuilder::new(Cache::temp()?).build();
let flat_index = FlatIndex::default();
let index = InMemoryIndex::default();
let interpreter = Interpreter::artificial(Platform::current()?, markers.clone());
// TODO(konstin): Should we also use the bootstrapped pythons here?
let real_interpreter =
find_default_python(&Cache::temp().unwrap()).expect("Expected a python to be installed");
let interpreter = Interpreter::artificial(real_interpreter.platform().clone(), markers.clone());
let build_context = DummyContext::new(Cache::temp()?, interpreter.clone());
let resolver = Resolver::new(
manifest,

@@ -21,7 +21,7 @@ required-features = ["cli"]
workspace = true

[dependencies]
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
pypi-types = { path = "../pypi-types" }
uv-cache = { path = "../uv-cache" }
uv-fs = { path = "../uv-fs" }

@@ -1,9 +1,9 @@
use std::io;
use std::path::Path;

use platform_tags::PlatformError;
use thiserror::Error;

use platform_host::PlatformError;
use uv_interpreter::{Interpreter, PythonEnvironment};

pub use crate::bare::create_bare_venv;

@@ -11,7 +11,6 @@ use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{fmt, EnvFilter};

use platform_host::Platform;
use uv_cache::Cache;
use uv_interpreter::{find_default_python, find_requested_python};
use uv_virtualenv::{create_bare_venv, Prompt};

@@ -30,18 +29,17 @@ struct Cli {
fn run() -> Result<(), uv_virtualenv::Error> {
let cli = Cli::parse();
let location = cli.path.unwrap_or(PathBuf::from(".venv"));
let platform = Platform::current()?;
let cache = if let Some(project_dirs) = ProjectDirs::from("", "", "uv-virtualenv") {
Cache::from_path(project_dirs.cache_dir())?
} else {
Cache::from_path(".cache")?
};
let interpreter = if let Some(python_request) = &cli.python {
find_requested_python(python_request, &platform, &cache)?.ok_or(
find_requested_python(python_request, &cache)?.ok_or(
uv_interpreter::Error::NoSuchPython(python_request.to_string()),
)?
} else {
find_default_python(&platform, &cache)?
find_default_python(&cache)?
};
create_bare_venv(
&location,

@@ -19,7 +19,6 @@ distribution-types = { path = "../distribution-types" }
install-wheel-rs = { path = "../install-wheel-rs", features = ["clap"], default-features = false }
pep440_rs = { path = "../pep440-rs" }
pep508_rs = { path = "../pep508-rs" }
platform-host = { path = "../platform-host" }
platform-tags = { path = "../platform-tags" }
pypi-types = { path = "../pypi-types" }
requirements-txt = { path = "../requirements-txt", features = ["reqwest"] }

@@ -16,7 +16,6 @@ use tempfile::tempdir_in;
use tracing::debug;

use distribution_types::{IndexLocations, LocalEditable, Verbatim};
use platform_host::Platform;
use platform_tags::Tags;
use requirements_txt::EditableRequirement;
use uv_cache::Cache;

@@ -127,8 +126,7 @@ pub(crate) async fn pip_compile(
let preferences = read_lockfile(output_file, upgrade).await?;

// Find an interpreter to use for building distributions
let platform = Platform::current()?;
let interpreter = find_best_python(python_version.as_ref(), &platform, &cache)?;
let interpreter = find_best_python(python_version.as_ref(), &cache)?;
debug!(
"Using Python {} interpreter at {} for builds",
interpreter.python_version(),

@@ -6,7 +6,6 @@ use owo_colors::OwoColorize;
use tracing::debug;

use distribution_types::{InstalledDist, Name};
use platform_host::Platform;
use uv_cache::Cache;
use uv_fs::Simplified;
use uv_installer::SitePackages;

@@ -24,16 +23,15 @@ pub(crate) fn pip_freeze(
printer: Printer,
) -> Result<ExitStatus> {
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python {
PythonEnvironment::from_requested_python(python, &platform, cache)?
PythonEnvironment::from_requested_python(python, cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
} else {
match PythonEnvironment::from_virtualenv(platform.clone(), cache) {
match PythonEnvironment::from_virtualenv(cache) {
Ok(venv) => venv,
Err(uv_interpreter::Error::VenvNotFound) => {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
}
Err(err) => return Err(err.into()),
}

@@ -17,7 +17,6 @@ use distribution_types::{
};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::{MarkerEnvironment, Requirement};
use platform_host::Platform;
use platform_tags::Tags;
use pypi_types::Yanked;
use requirements_txt::EditableRequirement;

@@ -108,13 +107,12 @@ pub(crate) async fn pip_install(
}

// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python.as_ref() {
PythonEnvironment::from_requested_python(python, &platform, &cache)?
PythonEnvironment::from_requested_python(python, &cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, &cache)?
PythonEnvironment::from_default_python(&cache)?
} else {
PythonEnvironment::from_virtualenv(platform, &cache)?
PythonEnvironment::from_virtualenv(&cache)?
};
debug!(
"Using Python {} environment at {}",

@@ -935,7 +933,7 @@ enum Error {
Client(#[from] uv_client::Error),

#[error(transparent)]
Platform(#[from] platform_host::PlatformError),
Platform(#[from] platform_tags::PlatformError),

#[error(transparent)]
Io(#[from] std::io::Error),

@@ -9,7 +9,6 @@ use tracing::debug;
use unicode_width::UnicodeWidthStr;

use distribution_types::{InstalledDist, Name};
use platform_host::Platform;
use uv_cache::Cache;
use uv_fs::Simplified;
use uv_installer::SitePackages;

@@ -35,16 +34,15 @@ pub(crate) fn pip_list(
printer: Printer,
) -> Result<ExitStatus> {
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python {
PythonEnvironment::from_requested_python(python, &platform, cache)?
PythonEnvironment::from_requested_python(python, cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
} else {
match PythonEnvironment::from_virtualenv(platform.clone(), cache) {
match PythonEnvironment::from_virtualenv(cache) {
Ok(venv) => venv,
Err(uv_interpreter::Error::VenvNotFound) => {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
}
Err(err) => return Err(err.into()),
}

@@ -7,7 +7,6 @@ use owo_colors::OwoColorize;
use tracing::debug;

use distribution_types::Name;
use platform_host::Platform;
use uv_cache::Cache;
use uv_fs::Simplified;
use uv_installer::SitePackages;

@@ -40,16 +39,15 @@ pub(crate) fn pip_show(
}

// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python {
PythonEnvironment::from_requested_python(python, &platform, cache)?
PythonEnvironment::from_requested_python(python, cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
} else {
match PythonEnvironment::from_virtualenv(platform.clone(), cache) {
match PythonEnvironment::from_virtualenv(cache) {
Ok(venv) => venv,
Err(uv_interpreter::Error::VenvNotFound) => {
PythonEnvironment::from_default_python(&platform, cache)?
PythonEnvironment::from_default_python(cache)?
}
Err(err) => return Err(err.into()),
}

@@ -7,7 +7,6 @@ use tracing::debug;

use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name};
use install_wheel_rs::linker::LinkMode;
use platform_host::Platform;
use platform_tags::Tags;
use pypi_types::Yanked;
use requirements_txt::EditableRequirement;

@@ -72,13 +71,12 @@ pub(crate) async fn pip_sync(
}

// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python.as_ref() {
PythonEnvironment::from_requested_python(python, &platform, &cache)?
PythonEnvironment::from_requested_python(python, &cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, &cache)?
PythonEnvironment::from_default_python(&cache)?
} else {
PythonEnvironment::from_virtualenv(platform, &cache)?
PythonEnvironment::from_virtualenv(&cache)?
};
debug!(
"Using Python {} environment at {}",

@@ -5,7 +5,6 @@ use owo_colors::OwoColorize;
use tracing::debug;

use distribution_types::{InstalledMetadata, Name};
use platform_host::Platform;
use uv_cache::Cache;
use uv_client::Connectivity;
use uv_fs::Simplified;

@@ -42,13 +41,12 @@ pub(crate) async fn pip_uninstall(
} = RequirementsSpecification::from_simple_sources(sources, connectivity).await?;

// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python.as_ref() {
PythonEnvironment::from_requested_python(python, &platform, &cache)?
PythonEnvironment::from_requested_python(python, &cache)?
} else if system {
PythonEnvironment::from_default_python(&platform, &cache)?
PythonEnvironment::from_default_python(&cache)?
} else {
PythonEnvironment::from_virtualenv(platform, &cache)?
PythonEnvironment::from_virtualenv(&cache)?
};
debug!(
"Using Python {} environment at {}",

@@ -13,7 +13,6 @@ use thiserror::Error;

use distribution_types::{DistributionMetadata, IndexLocations, Name};
use pep508_rs::Requirement;
use platform_host::Platform;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndex, FlatIndexClient, RegistryClientBuilder};
use uv_dispatch::BuildDispatch;

@@ -97,14 +96,13 @@ async fn venv_impl(
printer: Printer,
) -> miette::Result<ExitStatus> {
// Locate the Python interpreter.
let platform = Platform::current().into_diagnostic()?;
let interpreter = if let Some(python_request) = python_request {
find_requested_python(python_request, &platform, cache)
find_requested_python(python_request, cache)
.into_diagnostic()?
.ok_or(Error::NoSuchPython(python_request.to_string()))
.into_diagnostic()?
} else {
find_default_python(&platform, cache).into_diagnostic()?
find_default_python(cache).into_diagnostic()?
};

writeln!(

|
|||
use std::process::Output;
|
||||
use uv_fs::Simplified;
|
||||
|
||||
use platform_host::Platform;
|
||||
use uv_cache::Cache;
|
||||
use uv_interpreter::find_requested_python;
|
||||
|
||||
|
@ -297,14 +296,14 @@ pub fn get_bin() -> PathBuf {
|
|||
PathBuf::from(env!("CARGO_BIN_EXE_uv"))
|
||||
}
|
||||
|
||||
/// Create a directory with the requested Python binaries available.
|
||||
/// Create a `PATH` with the requested Python versions available in order.
|
||||
pub fn create_bin_with_executables(
|
||||
temp_dir: &assert_fs::TempDir,
|
||||
python_versions: &[&str],
|
||||
) -> anyhow::Result<OsString> {
|
||||
if let Some(bootstrapped_pythons) = bootstrapped_pythons() {
|
||||
let selected_pythons = bootstrapped_pythons.into_iter().filter(|path| {
|
||||
python_versions.iter().any(|python_version| {
|
||||
let selected_pythons = python_versions.iter().flat_map(|python_version| {
|
||||
bootstrapped_pythons.iter().filter(move |path| {
|
||||
// Good enough since we control the directory
|
||||
path.to_str()
|
||||
.unwrap()
|
||||
|
@ -317,12 +316,8 @@ pub fn create_bin_with_executables(
|
|||
let bin = temp_dir.child("bin");
|
||||
fs_err::create_dir(&bin)?;
|
||||
for &request in python_versions {
|
||||
let interpreter = find_requested_python(
|
||||
request,
|
||||
&Platform::current().unwrap(),
|
||||
&Cache::temp().unwrap(),
|
||||
)?
|
||||
.ok_or(uv_interpreter::Error::NoSuchPython(request.to_string()))?;
|
||||
let interpreter = find_requested_python(request, &Cache::temp().unwrap())?
|
||||
.ok_or(uv_interpreter::Error::NoSuchPython(request.to_string()))?;
|
||||
let name = interpreter
|
||||
.sys_executable()
|
||||
.file_name()
|
||||
|
|
|
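With the rewritten selection above, the generated `bin` directory now follows the order of the requested versions rather than the discovery order of the bootstrapped interpreters, which is what lets the `windows_shims` test below ask for 3.9 ahead of 3.8. A usage sketch (same helper and signature as in this file; the surrounding test scaffolding is assumed):

```rust
// Build a PATH prefix in which python3.9 shadows python3.8.
let temp_dir = assert_fs::TempDir::new()?;
let bin = create_bin_with_executables(&temp_dir, &["3.9", "3.8"])?;
// `bin` is then prepended to PATH for the spawned `uv venv` invocation.
```
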
@@ -284,7 +284,7 @@ fn create_venv_unknown_python_minor() -> Result<()> {
----- stdout -----

----- stderr -----
× No Python 3.15 In `PATH`. Is Python 3.15 installed?
× No Python 3.15 in `PATH`. Is Python 3.15 installed?
"###
);
}

@@ -309,7 +309,7 @@ fn create_venv_unknown_python_patch() -> Result<()> {
),
(
r"No Python 3\.8\.0 found through `py --list-paths` or in `PATH`\. Is Python 3\.8\.0 installed\?",
"No Python 3.8.0 In `PATH`. Is Python 3.8.0 installed?",
"No Python 3.8.0 in `PATH`. Is Python 3.8.0 installed?",
),
(&filter_venv, "/home/ferris/project/.venv"),
];

@@ -330,7 +330,7 @@ fn create_venv_unknown_python_patch() -> Result<()> {
----- stdout -----

----- stderr -----
× No Python 3.8.0 In `PATH`. Is Python 3.8.0 installed?
× No Python 3.8.0 in `PATH`. Is Python 3.8.0 installed?
"###
);

@@ -537,7 +537,7 @@ fn windows_shims() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let bin =
create_bin_with_executables(&temp_dir, &["3.8", "3.9"]).expect("Failed to create bin dir");
create_bin_with_executables(&temp_dir, &["3.9", "3.8"]).expect("Failed to create bin dir");
let venv = temp_dir.child(".venv");
let shim_path = temp_dir.child("shim");