Replace Python bootstrapping script with Rust implementation (#2842)

See https://github.com/astral-sh/uv/issues/2617

Note this also includes:
- #2918 
- #2931 (pending)

A first step towards Python toolchain management in Rust.

First, we add a new crate to manage Python download metadata:

- Adds a new `uv-toolchain` crate
- Adds Rust structs for Python version download metadata
- Duplicates the script which downloads Python version metadata
- Adds a script to generate Rust code from the JSON metadata
- Adds a utility to download and extract the Python version
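
For orientation, the core struct modeling a single download, as defined in the new crate's `downloads` module (see the diff below):

```rust
pub struct PythonDownload {
    key: &'static str,
    implementation: ImplementationName,
    arch: Arch,
    os: Os,
    libc: Libc,
    major: u8,
    minor: u8,
    patch: u8,
    url: &'static str,
    sha256: Option<&'static str>,
}
```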

I explored some alternatives, like a build script using `serde` and
`uneval` to construct the code from our structs automatically, but
deemed it too heavy. Unlike Rye, I don't generate the Rust directly
from the web requests; an intermediate JSON layer speeds up iteration
on the Rust types.
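
The codegen script renders that JSON into a static array of `PythonDownload` entries via the mustache template added in this PR. A generated entry looks roughly like this (the version, key, URL, and hash are illustrative placeholders):

```rust
PythonDownload {
    key: "cpython-3.12.1-linux-x86_64-gnu",
    major: 3,
    minor: 12,
    patch: 1,
    implementation: ImplementationName::Cpython,
    arch: Arch::X86_64,
    os: Os::Linux,
    libc: Libc::Gnu,
    url: "...",
    sha256: Some("..."),
},
```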

Next, we add a `uv-dev` command, `fetch-python`, to download Python
versions as the bootstrapping script did.

- Downloads a requested version or reads from `.python-versions`
- Extracts to `UV_BOOTSTRAP_DIR`
- Links executables so the directory can be added to `PATH`

This command is not really intended to be user-facing, but it's a good
PoC for the `uv-toolchain` API. Hash checking (via the sha256) isn't
implemented yet; we can do that in a follow-up.
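
Roughly, the flow through the `uv-toolchain` API looks like this (a hypothetical `fetch_one` helper; the real command adds concurrency, logging, and executable linking):

```rust
use std::str::FromStr;
use uv_toolchain::{DownloadResult, PythonDownload, PythonDownloadRequest, TOOLCHAIN_DIRECTORY};

async fn fetch_one(version: &str) -> anyhow::Result<()> {
    // Parse the request and fill in platform defaults (implementation, arch, os, libc).
    let request = PythonDownloadRequest::from_str(version)?.fill()?;
    // Look up a matching download in the static metadata.
    let download = PythonDownload::from_request(&request).expect("no download for request");
    let client = uv_client::BaseClientBuilder::new().build();
    // Download and extract into the toolchain directory, unless already present.
    match download.fetch(&client, &*TOOLCHAIN_DIRECTORY).await? {
        DownloadResult::AlreadyAvailable(path) => println!("already available: {}", path.display()),
        DownloadResult::Fetched(path) => println!("fetched to: {}", path.display()),
    }
    Ok(())
}
```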

Finally, we remove the `scripts/bootstrap` directory, update CI to use
the new command, and update the CONTRIBUTING docs.

<img width="1023" alt="Screenshot 2024-04-08 at 17 12 15"
src="57bd3cf1-7477-4bb8-a8e9-802a00d772cb">
Zanie Blue 2024-04-10 11:22:41 -05:00 committed by GitHub
parent 7cd98d2499
commit 44e39bdca3
31 changed files with 8170 additions and 3703 deletions


@ -23,13 +23,16 @@ pep508_rs = { workspace = true }
uv-build = { workspace = true }
uv-cache = { workspace = true, features = ["clap"] }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-dispatch = { workspace = true }
uv-extract = { workspace = true }
uv-fs = { workspace = true }
uv-installer = { workspace = true }
uv-interpreter = { workspace = true }
uv-normalize = { workspace = true }
uv-resolver = { workspace = true }
uv-toolchain = { workspace = true }
uv-types = { workspace = true }
uv-configuration = { workspace = true }
# Any dependencies that are exclusively used in `uv-dev` should be listed as non-workspace
# dependencies, to ensure that we're forced to think twice before including them in other crates.
@ -46,14 +49,18 @@ petgraph = { workspace = true }
poloto = { version = "19.1.2" }
resvg = { version = "0.29.0" }
rustc-hash = { workspace = true }
reqwest = { workspace = true }
tempfile = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tagu = { version = "0.1.6" }
tokio = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] }
tracing = { workspace = true }
tracing-durations-export = { workspace = true, features = ["plot"] }
tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true }
walkdir = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]


@ -0,0 +1,142 @@
use anyhow::Result;
use clap::Parser;
use fs_err as fs;
#[cfg(unix)]
use fs_err::tokio::symlink;
use futures::StreamExt;
#[cfg(unix)]
use itertools::Itertools;
use std::str::FromStr;
#[cfg(unix)]
use std::{collections::HashMap, path::PathBuf};
use tokio::time::Instant;
use tracing::{info, info_span, Instrument};
use uv_fs::Simplified;
use uv_toolchain::{
DownloadResult, Error, PythonDownload, PythonDownloadRequest, TOOLCHAIN_DIRECTORY,
};
#[derive(Parser, Debug)]
pub(crate) struct FetchPythonArgs {
versions: Vec<String>,
}
pub(crate) async fn fetch_python(args: FetchPythonArgs) -> Result<()> {
let start = Instant::now();
let bootstrap_dir = &*TOOLCHAIN_DIRECTORY;
fs_err::create_dir_all(bootstrap_dir)?;
let versions = if args.versions.is_empty() {
info!("Reading versions from file...");
read_versions_file().await?
} else {
args.versions
};
let requests = versions
.iter()
.map(|version| {
PythonDownloadRequest::from_str(version).and_then(PythonDownloadRequest::fill)
})
.collect::<Result<Vec<_>, Error>>()?;
let downloads = requests
.iter()
.map(|request| match PythonDownload::from_request(request) {
Some(download) => download,
None => panic!("No download found for request {request:?}"),
})
.collect::<Vec<_>>();
let client = uv_client::BaseClientBuilder::new().build();
info!("Fetching requested versions...");
let mut tasks = futures::stream::iter(downloads.iter())
.map(|download| {
async {
let result = download.fetch(&client, bootstrap_dir).await;
(download.python_version(), result)
}
.instrument(info_span!("download", key = %download))
})
.buffered(4);
let mut results = Vec::new();
let mut downloaded = 0;
while let Some(task) = tasks.next().await {
let (version, result) = task;
let path = match result? {
DownloadResult::AlreadyAvailable(path) => {
info!("Found existing download for v{}", version);
path
}
DownloadResult::Fetched(path) => {
info!("Downloaded v{} to {}", version, path.user_display());
downloaded += 1;
path
}
};
results.push((version, path));
}
if downloaded > 0 {
let s = if downloaded == 1 { "" } else { "s" };
info!(
"Fetched {} in {}s",
format!("{} version{}", downloaded, s),
start.elapsed().as_secs()
);
} else {
info!("All versions downloaded already.");
};
// Order matters here, as we overwrite previous links
info!("Installing to `{}`...", bootstrap_dir.user_display());
// On Windows, linking the executable generally results in broken installations
// and each toolchain path will need to be added to the PATH separately in the
// desired order
#[cfg(unix)]
{
let mut links: HashMap<PathBuf, PathBuf> = HashMap::new();
for (version, path) in results {
// TODO(zanieb): This path should be a part of the download metadata
let executable = path.join("install").join("bin").join("python3");
for target in [
bootstrap_dir.join(format!("python{}", version.python_full_version())),
bootstrap_dir.join(format!("python{}.{}", version.major(), version.minor())),
bootstrap_dir.join(format!("python{}", version.major())),
bootstrap_dir.join("python"),
] {
// Attempt to remove any existing file; the symlink below will fail if it couldn't be
// removed for some reason, but a missing file shouldn't be an error
let _ = fs::remove_file(&target);
symlink(&executable, &target).await?;
links.insert(target, executable.clone());
}
}
for (target, executable) in links.iter().sorted() {
info!(
"Linked `{}` to `{}`",
target.user_display(),
executable.user_display()
);
}
};
info!("Installed {} versions", requests.len());
Ok(())
}
async fn read_versions_file() -> Result<Vec<String>> {
let lines: Vec<String> = fs::tokio::read_to_string(".python-versions")
.await?
.lines()
.map(ToString::to_string)
.collect();
Ok(lines)
}


@ -21,6 +21,7 @@ use resolve_many::ResolveManyArgs;
use crate::build::{build, BuildArgs};
use crate::clear_compile::ClearCompileArgs;
use crate::compile::CompileArgs;
use crate::fetch_python::FetchPythonArgs;
use crate::render_benchmarks::RenderBenchmarksArgs;
use crate::resolve_cli::ResolveCliArgs;
use crate::wheel_metadata::WheelMetadataArgs;
@ -44,6 +45,7 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
mod build;
mod clear_compile;
mod compile;
mod fetch_python;
mod render_benchmarks;
mod resolve_cli;
mod resolve_many;
@ -72,6 +74,8 @@ enum Cli {
Compile(CompileArgs),
/// Remove all `.pyc` in the tree.
ClearCompile(ClearCompileArgs),
/// Fetch Python versions for testing.
FetchPython(FetchPythonArgs),
}
#[instrument] // Anchor span to check for overhead
@ -92,6 +96,7 @@ async fn run() -> Result<()> {
Cli::RenderBenchmarks(args) => render_benchmarks::render_benchmarks(&args)?,
Cli::Compile(args) => compile::compile(args).await?,
Cli::ClearCompile(args) => clear_compile::clear_compile(&args)?,
Cli::FetchPython(args) => fetch_python::fetch_python(args).await?,
}
Ok(())
}


@ -157,6 +157,7 @@ pub async fn untar_gz<R: tokio::io::AsyncRead + Unpin>(
) -> Result<(), Error> {
let reader = tokio::io::BufReader::new(reader);
let decompressed_bytes = async_compression::tokio::bufread::GzipDecoder::new(reader);
let mut archive = tokio_tar::ArchiveBuilder::new(decompressed_bytes)
.set_preserve_mtime(false)
.build();


@ -21,6 +21,7 @@ platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-fs = { workspace = true }
uv-toolchain = { workspace = true }
configparser = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }


@ -7,10 +7,11 @@ use tracing::{debug, instrument};
use uv_cache::Cache;
use uv_fs::normalize_path;
use uv_toolchain::PythonVersion;
use crate::interpreter::InterpreterInfoError;
use crate::python_environment::{detect_python_executable, detect_virtual_env};
use crate::{Error, Interpreter, PythonVersion};
use crate::{Error, Interpreter};
/// Find a Python of a specific version, a binary with a name or a path to a binary.
///
@ -464,7 +465,7 @@ fn find_version(
let version_matches = |interpreter: &Interpreter| -> bool {
if let Some(python_version) = python_version {
// If a patch version was provided, check for an exact match
python_version.is_satisfied_by(interpreter)
interpreter.satisfies(python_version)
} else {
// The version always matches if one was not provided
true


@ -16,6 +16,7 @@ use platform_tags::{Tags, TagsError};
use pypi_types::Scheme;
use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness, Timestamp};
use uv_fs::{write_atomic_sync, PythonExt, Simplified};
use uv_toolchain::PythonVersion;
use crate::Error;
use crate::Virtualenv;
@ -314,6 +315,18 @@ impl Interpreter {
},
}
}
/// Check if the interpreter matches the given Python version.
///
/// If a patch version is present, we will require an exact match.
/// Otherwise, just the major and minor version numbers need to match.
pub fn satisfies(&self, version: &PythonVersion) -> bool {
if version.patch().is_some() {
version.version() == self.python_version()
} else {
(version.major(), version.minor()) == self.python_tuple()
}
}
}
/// The `EXTERNALLY-MANAGED` file in a Python installation.


@ -19,14 +19,12 @@ pub use crate::find_python::{find_best_python, find_default_python, find_request
pub use crate::interpreter::Interpreter;
use crate::interpreter::InterpreterInfoError;
pub use crate::python_environment::PythonEnvironment;
pub use crate::python_version::PythonVersion;
pub use crate::virtualenv::Virtualenv;
mod cfg;
mod find_python;
mod interpreter;
mod python_environment;
mod python_version;
mod virtualenv;
#[derive(Debug, Error)]


@ -0,0 +1,33 @@
[package]
name = "uv-toolchain"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
[dependencies]
uv-client = { workspace = true }
uv-extract = { workspace = true }
uv-fs = { workspace = true }
pep440_rs = { workspace = true }
pep508_rs = { workspace = true }
anyhow = { workspace = true }
fs-err = { workspace = true }
futures = { workspace = true }
once_cell = {workspace = true}
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] }
tracing = { workspace = true }
url = { workspace = true }
[lints]
workspace = true


@ -0,0 +1,292 @@
#!/usr/bin/env python3.12
"""
Fetch Python version metadata.
Generates the `python-version-metadata.json` file.
Usage:
python fetch-version-metadata.py
Acknowledgements:
Derived from https://github.com/mitsuhiko/rye/tree/f9822267a7f00332d15be8551f89a212e7bc9017
Originally authored by Armin Ronacher under the MIT license
"""
import argparse
import hashlib
import json
import logging
import re
import urllib.error
import urllib.request
from itertools import chain
from pathlib import Path
from urllib.parse import unquote
SELF_DIR = Path(__file__).parent
RELEASE_URL = "https://api.github.com/repos/indygreg/python-build-standalone/releases"
HEADERS = {
"X-GitHub-Api-Version": "2022-11-28",
}
VERSIONS_FILE = SELF_DIR / "python-version-metadata.json"
FLAVOR_PREFERENCES = [
"shared-pgo",
"shared-noopt",
"shared-noopt",
"static-noopt",
"gnu-pgo+lto",
"gnu-lto",
"gnu-pgo",
"pgo+lto",
"lto",
"pgo",
]
HIDDEN_FLAVORS = [
"debug",
"noopt",
"install_only",
]
SPECIAL_TRIPLES = {
"macos": "x86_64-apple-darwin",
"linux64": "x86_64-unknown-linux-gnu",
"windows-amd64": "x86_64-pc-windows",
"windows-x86": "i686-pc-windows",
"windows-amd64-shared": "x86_64-pc-windows",
"windows-x86-shared": "i686-pc-windows",
"linux64-musl": "x86_64-unknown-linux-musl",
}
_filename_re = re.compile(
r"""(?x)
^
cpython-(?P<ver>\d+\.\d+\.\d+?)
(?:\+\d+)?
-(?P<triple>.*?)
(?:-[\dT]+)?\.tar\.(?:gz|zst)
$
"""
)
_suffix_re = re.compile(
r"""(?x)^(.*?)-(%s)$"""
% (
"|".join(
map(
re.escape,
sorted(FLAVOR_PREFERENCES + HIDDEN_FLAVORS, key=len, reverse=True),
)
)
)
)
# Normalized mappings to match the Rust types
ARCH_MAP = {
"ppc64": "powerpc64",
"ppc64le": "powerpc64le",
"i686": "x86",
"i386": "x86",
}
OS_MAP = {"darwin": "macos"}
def parse_filename(filename):
match = _filename_re.match(filename)
if match is None:
return
version, triple = match.groups()
if triple.endswith("-full"):
triple = triple[:-5]
match = _suffix_re.match(triple)
if match is not None:
triple, suffix = match.groups()
else:
suffix = None
return (version, triple, suffix)
def normalize_triple(triple):
if "-static" in triple:
logging.debug("Skipping %r: unknown triple", triple)
return
triple = SPECIAL_TRIPLES.get(triple, triple)
pieces = triple.split("-")
try:
arch = normalize_arch(pieces[0])
operating_system = normalize_os(pieces[2])
if pieces[2] == "linux":
# On linux, the triple has four segments, the last one is the libc
libc = pieces[3]
else:
libc = "none"
except IndexError:
logging.debug("Skipping %r: unknown triple", triple)
return
return "%s-%s-%s" % (arch, operating_system, libc)
def normalize_arch(arch):
arch = ARCH_MAP.get(arch, arch)
pieces = arch.split("_")
# Strip `_vN` from `x86_64`
return "_".join(pieces[:2])
def normalize_os(os):
return OS_MAP.get(os, os)
def read_sha256(url):
try:
resp = urllib.request.urlopen(url + ".sha256")
except urllib.error.HTTPError:
return None
assert resp.status == 200
return resp.read().decode().strip()
def sha256(path):
h = hashlib.sha256()
with open(path, "rb") as file:
while True:
# Reading is buffered, so we can read smaller chunks.
chunk = file.read(h.block_size)
if not chunk:
break
h.update(chunk)
return h.hexdigest()
def _sort_by_flavor_preference(info):
_triple, flavor, _url = info
try:
pref = FLAVOR_PREFERENCES.index(flavor)
except ValueError:
pref = len(FLAVOR_PREFERENCES) + 1
return pref
def _sort_by_interpreter_and_version(info):
interpreter, version_tuple, _ = info
return (interpreter, version_tuple)
def find():
"""
Find available Python versions and write metadata to a file.
"""
results = {}
# Collect all available Python downloads
for page in range(1, 100):
logging.debug("Reading release page %s...", page)
resp = urllib.request.urlopen("%s?page=%d" % (RELEASE_URL, page))
rows = json.loads(resp.read())
if not rows:
break
for row in rows:
for asset in row["assets"]:
url = asset["browser_download_url"]
base_name = unquote(url.rsplit("/")[-1])
if base_name.endswith(".sha256"):
continue
info = parse_filename(base_name)
if info is None:
continue
py_ver, triple, flavor = info
if "-static" in triple or (flavor and "noopt" in flavor):
continue
triple = normalize_triple(triple)
if triple is None:
continue
results.setdefault(py_ver, []).append((triple, flavor, url))
# Collapse CPython variants to a single URL flavor per triple
cpython_results: dict[tuple[int, int, int], dict[tuple[str, str, str], str]] = {}
for py_ver, choices in results.items():
urls = {}
for triple, flavor, url in sorted(choices, key=_sort_by_flavor_preference):
triple = tuple(triple.split("-"))
# Skip existing triples, preferring the first flavor
if triple in urls:
continue
urls[triple] = url
cpython_results[tuple(map(int, py_ver.split(".")))] = urls
# Collect variants across interpreter kinds
# TODO(zanieb): Note we only support CPython downloads at this time,
# but PyPy will be added to the chain in the future.
final_results = {}
for interpreter, py_ver, choices in sorted(
chain(
(("cpython",) + x for x in cpython_results.items()),
),
key=_sort_by_interpreter_and_version,
# Reverse the ordering so newer versions are first
reverse=True,
):
# Sort by the remaining information for determinism
# This groups download metadata in triple component order
for (arch, operating_system, libc), url in sorted(choices.items()):
key = "%s-%s.%s.%s-%s-%s-%s" % (
interpreter,
*py_ver,
operating_system,
arch,
libc,
)
logging.info("Found %s", key)
sha256 = read_sha256(url)
final_results[key] = {
"name": interpreter,
"arch": arch,
"os": operating_system,
"libc": libc,
"major": py_ver[0],
"minor": py_ver[1],
"patch": py_ver[2],
"url": url,
"sha256": sha256,
}
VERSIONS_FILE.parent.mkdir(parents=True, exist_ok=True)
VERSIONS_FILE.write_text(json.dumps(final_results, indent=2))
def main():
parser = argparse.ArgumentParser(description="Fetch Python version metadata.")
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Enable debug logging",
)
parser.add_argument(
"-q",
"--quiet",
action="store_true",
help="Disable logging",
)
args = parser.parse_args()
if args.quiet:
log_level = logging.CRITICAL
elif args.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig(
level=log_level,
format="%(asctime)s %(levelname)s %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
find()
if __name__ == "__main__":
main()

File diff suppressed because it is too large.


@ -0,0 +1,438 @@
use std::fmt::{self, Display};
use std::io;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use crate::PythonVersion;
use thiserror::Error;
use uv_client::BetterReqwestError;
use futures::TryStreamExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::debug;
use url::Url;
use uv_fs::Simplified;
#[derive(Error, Debug)]
pub enum Error {
#[error("operating system not supported: {0}")]
OsNotSupported(String),
#[error("architecture not supported: {0}")]
ArchNotSupported(String),
#[error("libc type could not be detected")]
LibcNotDetected(),
#[error("invalid python version: {0}")]
InvalidPythonVersion(String),
#[error("download failed")]
NetworkError(#[from] BetterReqwestError),
#[error("download failed")]
NetworkMiddlewareError(#[source] anyhow::Error),
#[error(transparent)]
ExtractError(#[from] uv_extract::Error),
#[error("invalid download url")]
InvalidUrl(#[from] url::ParseError),
#[error("failed to create download directory")]
DownloadDirError(#[source] io::Error),
#[error("failed to copy to: {0}", to.user_display())]
CopyError {
to: PathBuf,
#[source]
err: io::Error,
},
#[error("failed to read toolchain directory: {0}", dir.user_display())]
ReadError {
dir: PathBuf,
#[source]
err: io::Error,
},
}
#[derive(Debug, PartialEq)]
pub struct PythonDownload {
key: &'static str,
implementation: ImplementationName,
arch: Arch,
os: Os,
libc: Libc,
major: u8,
minor: u8,
patch: u8,
url: &'static str,
sha256: Option<&'static str>,
}
#[derive(Debug)]
pub struct PythonDownloadRequest {
version: Option<PythonVersion>,
implementation: Option<ImplementationName>,
arch: Option<Arch>,
os: Option<Os>,
libc: Option<Libc>,
}
impl PythonDownloadRequest {
pub fn new(
version: Option<PythonVersion>,
implementation: Option<ImplementationName>,
arch: Option<Arch>,
os: Option<Os>,
libc: Option<Libc>,
) -> Self {
Self {
version,
implementation,
arch,
os,
libc,
}
}
#[must_use]
pub fn with_implementation(mut self, implementation: ImplementationName) -> Self {
self.implementation = Some(implementation);
self
}
#[must_use]
pub fn with_arch(mut self, arch: Arch) -> Self {
self.arch = Some(arch);
self
}
#[must_use]
pub fn with_os(mut self, os: Os) -> Self {
self.os = Some(os);
self
}
#[must_use]
pub fn with_libc(mut self, libc: Libc) -> Self {
self.libc = Some(libc);
self
}
pub fn fill(mut self) -> Result<Self, Error> {
if self.implementation.is_none() {
self.implementation = Some(ImplementationName::Cpython);
}
if self.arch.is_none() {
self.arch = Some(Arch::from_env()?);
}
if self.os.is_none() {
self.os = Some(Os::from_env()?);
}
if self.libc.is_none() {
self.libc = Some(Libc::from_env()?);
}
Ok(self)
}
}
impl FromStr for PythonDownloadRequest {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// TODO(zanieb): Implement parsing of additional request parts
let version = PythonVersion::from_str(s).map_err(Error::InvalidPythonVersion)?;
Ok(Self::new(Some(version), None, None, None, None))
}
}
#[derive(Debug, PartialEq)]
pub enum Libc {
Gnu,
Musl,
None,
}
#[derive(Debug, PartialEq)]
pub enum ImplementationName {
Cpython,
}
#[derive(Debug, PartialEq)]
pub struct Platform {
os: Os,
arch: Arch,
libc: Libc,
}
include!("python_versions.inc");
pub enum DownloadResult {
AlreadyAvailable(PathBuf),
Fetched(PathBuf),
}
impl PythonDownload {
/// Return the [`PythonDownload`] corresponding to the key, if it exists.
pub fn from_key(key: &str) -> Option<&PythonDownload> {
PYTHON_DOWNLOADS.iter().find(|&value| value.key == key)
}
pub fn from_request(request: &PythonDownloadRequest) -> Option<&'static PythonDownload> {
for download in PYTHON_DOWNLOADS {
if let Some(arch) = &request.arch {
if download.arch != *arch {
continue;
}
}
if let Some(os) = &request.os {
if download.os != *os {
continue;
}
}
if let Some(implementation) = &request.implementation {
if download.implementation != *implementation {
continue;
}
}
if let Some(version) = &request.version {
if download.major != version.major() {
continue;
}
if download.minor != version.minor() {
continue;
}
if let Some(patch) = version.patch() {
if download.patch != patch {
continue;
}
}
}
return Some(download);
}
None
}
pub fn url(&self) -> &str {
self.url
}
pub fn sha256(&self) -> Option<&str> {
self.sha256
}
/// Download and extract a Python distribution into the given directory.
pub async fn fetch(
&self,
client: &uv_client::BaseClient,
path: &Path,
) -> Result<DownloadResult, Error> {
let url = Url::parse(self.url)?;
let path = path.join(self.key).clone();
// If it already exists, return it
if path.is_dir() {
return Ok(DownloadResult::AlreadyAvailable(path));
}
let filename = url.path_segments().unwrap().last().unwrap();
let response = client.get(url.clone()).send().await?;
// Ensure the request was successful.
response.error_for_status_ref()?;
// Download and extract into a temporary directory.
let temp_dir = tempfile::tempdir().map_err(Error::DownloadDirError)?;
debug!(
"Downloading {url} to temporary location {}",
temp_dir.path().display()
);
let reader = response
.bytes_stream()
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
.into_async_read();
debug!("Extracting {filename}");
uv_extract::stream::archive(reader.compat(), filename, temp_dir.path()).await?;
// Extract the top-level directory.
let extracted = match uv_extract::strip_component(temp_dir.path()) {
Ok(top_level) => top_level,
Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.into_path(),
Err(err) => return Err(err.into()),
};
// Persist it to the target
debug!("Moving {} to {}", extracted.display(), path.user_display());
fs_err::tokio::rename(extracted, &path)
.await
.map_err(|err| Error::CopyError {
to: path.clone(),
err,
})?;
Ok(DownloadResult::Fetched(path))
}
pub fn python_version(&self) -> PythonVersion {
PythonVersion::from_str(&format!("{}.{}.{}", self.major, self.minor, self.patch))
.expect("Python downloads should always have valid versions")
}
}
impl Platform {
pub fn new(os: Os, arch: Arch, libc: Libc) -> Self {
Self { os, arch, libc }
}
pub fn from_env() -> Result<Self, Error> {
Ok(Self::new(
Os::from_env()?,
Arch::from_env()?,
Libc::from_env()?,
))
}
}
/// All supported operating systems.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Os {
Windows,
Linux,
Macos,
FreeBsd,
NetBsd,
OpenBsd,
Dragonfly,
Illumos,
Haiku,
}
impl fmt::Display for Os {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::Windows => write!(f, "Windows"),
Self::Macos => write!(f, "MacOS"),
Self::FreeBsd => write!(f, "FreeBSD"),
Self::NetBsd => write!(f, "NetBSD"),
Self::Linux => write!(f, "Linux"),
Self::OpenBsd => write!(f, "OpenBSD"),
Self::Dragonfly => write!(f, "DragonFly"),
Self::Illumos => write!(f, "Illumos"),
Self::Haiku => write!(f, "Haiku"),
}
}
}
impl Os {
pub(crate) fn from_env() -> Result<Self, Error> {
Self::from_str(std::env::consts::OS)
}
}
impl FromStr for Os {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"windows" => Ok(Self::Windows),
"linux" => Ok(Self::Linux),
"macos" => Ok(Self::Macos),
"freebsd" => Ok(Self::FreeBsd),
"netbsd" => Ok(Self::NetBsd),
"openbsd" => Ok(Self::OpenBsd),
"dragonfly" => Ok(Self::Dragonfly),
"illumos" => Ok(Self::Illumos),
"haiku" => Ok(Self::Haiku),
_ => Err(Error::OsNotSupported(s.to_string())),
}
}
}
/// All supported CPU architectures
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum Arch {
Aarch64,
Armv6L,
Armv7L,
Powerpc64Le,
Powerpc64,
X86,
X86_64,
S390X,
}
impl fmt::Display for Arch {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::Aarch64 => write!(f, "aarch64"),
Self::Armv6L => write!(f, "armv6l"),
Self::Armv7L => write!(f, "armv7l"),
Self::Powerpc64Le => write!(f, "ppc64le"),
Self::Powerpc64 => write!(f, "ppc64"),
Self::X86 => write!(f, "i686"),
Self::X86_64 => write!(f, "x86_64"),
Self::S390X => write!(f, "s390x"),
}
}
}
impl FromStr for Arch {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"aarch64" | "arm64" => Ok(Self::Aarch64),
"armv6l" => Ok(Self::Armv6L),
"armv7l" => Ok(Self::Armv7L),
"powerpc64le" | "ppc64le" => Ok(Self::Powerpc64Le),
"powerpc64" | "ppc64" => Ok(Self::Powerpc64),
"x86" | "i686" | "i386" => Ok(Self::X86),
"x86_64" | "amd64" => Ok(Self::X86_64),
"s390x" => Ok(Self::S390X),
_ => Err(Error::ArchNotSupported(s.to_string())),
}
}
}
impl Arch {
pub(crate) fn from_env() -> Result<Self, Error> {
Self::from_str(std::env::consts::ARCH)
}
}
impl Libc {
pub(crate) fn from_env() -> Result<Self, Error> {
// TODO(zanieb): Perform this lookup
match std::env::consts::OS {
"linux" => Ok(Libc::Gnu),
"windows" | "macos" => Ok(Libc::None),
_ => Err(Error::LibcNotDetected()),
}
}
}
impl fmt::Display for Libc {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Libc::Gnu => f.write_str("gnu"),
Libc::None => f.write_str("none"),
Libc::Musl => f.write_str("musl"),
}
}
}
impl From<reqwest::Error> for Error {
fn from(error: reqwest::Error) -> Self {
Self::NetworkError(BetterReqwestError::from(error))
}
}
impl From<reqwest_middleware::Error> for Error {
fn from(error: reqwest_middleware::Error) -> Self {
match error {
reqwest_middleware::Error::Middleware(error) => Self::NetworkMiddlewareError(error),
reqwest_middleware::Error::Reqwest(error) => {
Self::NetworkError(BetterReqwestError::from(error))
}
}
}
}
impl Display for PythonDownload {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.key)
}
}


@ -0,0 +1,110 @@
use std::collections::BTreeSet;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use crate::downloads::{Arch, Error, Libc, Os};
use crate::python_version::PythonVersion;
use once_cell::sync::Lazy;
/// The directory where the Python toolchains we install are stored.
pub static TOOLCHAIN_DIRECTORY: Lazy<PathBuf> = Lazy::new(|| {
std::env::var_os("UV_BOOTSTRAP_DIR").map_or(
Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.parent()
.expect("CARGO_MANIFEST_DIR should be nested in workspace")
.parent()
.expect("CARGO_MANIFEST_DIR should be doubly nested in workspace")
.join("bin"),
PathBuf::from,
)
});
/// An installed Python toolchain.
#[derive(Debug, Clone)]
pub struct Toolchain {
/// The path to the top-level directory of the installed toolchain.
path: PathBuf,
}
impl Toolchain {
pub fn executable(&self) -> PathBuf {
if cfg!(windows) {
self.path.join("install").join("python.exe")
} else if cfg!(unix) {
self.path.join("install").join("bin").join("python3")
} else {
unimplemented!("Only Windows and Unix systems are supported.")
}
}
}
/// Return the toolchains that satisfy the given Python version on this platform.
///
/// ## Errors
///
/// - The platform metadata cannot be read
/// - A directory in the toolchain directory cannot be read
pub fn toolchains_for_version(version: &PythonVersion) -> Result<Vec<Toolchain>, Error> {
let platform_key = platform_key_from_env()?;
// TODO(zanieb): Consider returning an iterator instead of a `Vec`
// Note we need to collect paths regardless for sorting by version.
let toolchain_dirs = match fs_err::read_dir(TOOLCHAIN_DIRECTORY.to_path_buf()) {
Ok(toolchain_dirs) => {
// Collect sorted directory paths; `read_dir` is not stable across platforms
let directories: BTreeSet<_> = toolchain_dirs
.filter_map(|read_dir| match read_dir {
Ok(entry) => match entry.file_type() {
Ok(file_type) => file_type.is_dir().then_some(Ok(entry.path())),
Err(err) => Some(Err(err)),
},
Err(err) => Some(Err(err)),
})
.collect::<Result<_, std::io::Error>>()
.map_err(|err| Error::ReadError {
dir: TOOLCHAIN_DIRECTORY.to_path_buf(),
err,
})?;
directories
}
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
return Ok(Vec::new());
}
Err(err) => {
return Err(Error::ReadError {
dir: TOOLCHAIN_DIRECTORY.to_path_buf(),
err,
})
}
};
Ok(toolchain_dirs
.into_iter()
// Sort "newer" versions of Python first
.rev()
.filter_map(|path| {
if path
.file_name()
.map(OsStr::to_string_lossy)
.is_some_and(|filename| {
filename.starts_with(&format!("cpython-{version}"))
&& filename.ends_with(&platform_key)
})
{
Some(Toolchain { path })
} else {
None
}
})
.collect::<Vec<_>>())
}
/// Generate a platform portion of a key from the environment.
fn platform_key_from_env() -> Result<String, Error> {
let os = Os::from_env()?;
let arch = Arch::from_env()?;
let libc = Libc::from_env()?;
Ok(format!("{os}-{arch}-{libc}").to_lowercase())
}


@ -0,0 +1,9 @@
pub use crate::downloads::{
DownloadResult, Error, Platform, PythonDownload, PythonDownloadRequest,
};
pub use crate::find::{toolchains_for_version, Toolchain, TOOLCHAIN_DIRECTORY};
pub use crate::python_version::PythonVersion;
mod downloads;
mod find;
mod python_version;


@ -5,8 +5,6 @@ use std::str::FromStr;
use pep440_rs::Version;
use pep508_rs::{MarkerEnvironment, StringVersion};
use crate::Interpreter;
#[derive(Debug, Clone)]
pub struct PythonVersion(StringVersion);
@ -142,18 +140,6 @@ impl PythonVersion {
Self::from_str(format!("{}.{}", self.major(), self.minor()).as_str())
.expect("dropping a patch should always be valid")
}
/// Check if this Python version is satisfied by the given interpreter.
///
/// If a patch version is present, we will require an exact match.
/// Otherwise, just the major and minor version numbers need to match.
pub fn is_satisfied_by(&self, interpreter: &Interpreter) -> bool {
if self.patch().is_some() {
self.version() == interpreter.python_version()
} else {
(self.major(), self.minor()) == interpreter.python_tuple()
}
}
}
#[cfg(test)]

File diff suppressed because it is too large.


@ -0,0 +1,26 @@
// DO NOT EDIT
//
// Generated with `{{generated_with}}`
// From template at `{{generated_from}}`
pub(crate) const PYTHON_DOWNLOADS: &[PythonDownload] = &[
{{#versions}}
PythonDownload {
key: "{{key}}",
major: {{value.major}},
minor: {{value.minor}},
patch: {{value.patch}},
implementation: ImplementationName::{{value.name}},
arch: Arch::{{value.arch}},
os: Os::{{value.os}},
libc: Libc::{{value.libc}},
url: "{{value.url}}",
{{#value.sha256}}
sha256: Some("{{.}}")
{{/value.sha256}}
{{^value.sha256}}
sha256: None
{{/value.sha256}}
},
{{/versions}}
];


@ -0,0 +1,99 @@
#!/usr/bin/env python3.12
"""
Generate static Rust code from Python version metadata.
Generates the `python_versions.inc` file from the `python_versions.inc.mustache` template.
Usage:
python template-version-metadata.py
"""
import sys
import logging
import argparse
import json
import subprocess
from pathlib import Path
CRATE_ROOT = Path(__file__).parent
WORKSPACE_ROOT = CRATE_ROOT.parent.parent
VERSION_METADATA = CRATE_ROOT / "python-version-metadata.json"
TEMPLATE = CRATE_ROOT / "src" / "python_versions.inc.mustache"
TARGET = TEMPLATE.with_suffix("")
try:
import chevron_blue
except ImportError:
print(
"missing requirement `chevron-blue`",
file=sys.stderr,
)
exit(1)
def prepare_value(value: dict) -> dict:
# Convert fields from snake case to camel case for enums
for key in ["arch", "os", "libc", "name"]:
value[key] = value[key].title()
return value
def main():
debug = logging.getLogger().getEffectiveLevel() <= logging.DEBUG
data = {}
data["generated_with"] = Path(__file__).relative_to(WORKSPACE_ROOT)
data["generated_from"] = TEMPLATE.relative_to(WORKSPACE_ROOT)
data["versions"] = [
{"key": key, "value": prepare_value(value)}
for key, value in json.loads(VERSION_METADATA.read_text()).items()
]
# Render the template
logging.info(f"Rendering `{TEMPLATE.name}`...")
output = chevron_blue.render(
template=TEMPLATE.read_text(), data=data, no_escape=True, warn=debug
)
# Update the file
logging.info(f"Updating `{TARGET}`...")
TARGET.write_text(output)
subprocess.check_call(
["rustfmt", str(TARGET)],
stderr=subprocess.STDOUT,
stdout=sys.stderr if debug else subprocess.DEVNULL,
)
logging.info("Done!")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Generates Rust code for Python version metadata.",
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Enable debug logging",
)
parser.add_argument(
"-q",
"--quiet",
action="store_true",
help="Disable logging",
)
args = parser.parse_args()
if args.quiet:
log_level = logging.CRITICAL
elif args.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig(level=log_level, format="%(message)s")
main()


@ -34,6 +34,7 @@ uv-resolver = { workspace = true, features = ["clap"] }
uv-types = { workspace = true, features = ["clap"] }
uv-configuration = { workspace = true, features = ["clap"] }
uv-virtualenv = { workspace = true }
uv-toolchain = { workspace = true }
uv-warnings = { workspace = true }
anstream = { workspace = true }


@ -29,7 +29,7 @@ use uv_configuration::{
use uv_dispatch::BuildDispatch;
use uv_fs::Simplified;
use uv_installer::Downloader;
use uv_interpreter::{find_best_python, PythonEnvironment, PythonVersion};
use uv_interpreter::{find_best_python, PythonEnvironment};
use uv_normalize::{ExtraName, PackageName};
use uv_requirements::{
upgrade::read_lockfile, ExtrasSpecification, LookaheadResolver, NamedRequirementsResolver,
@ -39,6 +39,7 @@ use uv_resolver::{
AnnotationStyle, DependencyMode, DisplayResolutionGraph, Exclusions, InMemoryIndex, Manifest,
OptionsBuilder, PreReleaseMode, PythonRequirement, ResolutionMode, Resolver,
};
use uv_toolchain::PythonVersion;
use uv_types::{BuildIsolation, EmptyInstalledPackages, InFlight};
use uv_warnings::warn_user;


@ -21,10 +21,10 @@ use uv_configuration::{
Upgrade,
};
use uv_configuration::{IndexStrategy, NoBinary};
use uv_interpreter::PythonVersion;
use uv_normalize::{ExtraName, PackageName};
use uv_requirements::{ExtrasSpecification, RequirementsSource};
use uv_resolver::{AnnotationStyle, DependencyMode, PreReleaseMode, ResolutionMode};
use uv_toolchain::PythonVersion;
use crate::commands::{extra_name_with_clap_error, ExitStatus, ListFormat, VersionFormat};
use crate::compat::CompatArgs;


@ -4,11 +4,8 @@
use assert_cmd::assert::{Assert, OutputAssertExt};
use assert_cmd::Command;
use assert_fs::assert::PathAssert;
use assert_fs::fixture::PathChild;
#[cfg(unix)]
use fs_err::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use fs_err::os::windows::fs::symlink_file;
use regex::Regex;
use std::borrow::BorrowMut;
use std::env;
@ -16,10 +13,11 @@ use std::ffi::OsString;
use std::path::{Path, PathBuf};
use std::process::Output;
use std::str::FromStr;
use uv_fs::Simplified;
use uv_interpreter::find_requested_python;
use uv_cache::Cache;
use uv_interpreter::{find_requested_python, PythonVersion};
use uv_fs::Simplified;
use uv_toolchain::{toolchains_for_version, PythonVersion};
// Exclude any packages uploaded after this date.
pub static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";
@ -316,81 +314,23 @@ pub fn venv_to_interpreter(venv: &Path) -> PathBuf {
}
}
/// If bootstrapped python build standalone pythons exists in `<project root>/bin`,
/// return the paths to the directories containing the python binaries (i.e. as paths that
/// `which::which_in` can use).
///
/// Use `scripts/bootstrap/install.py` to bootstrap.
///
/// Python versions are sorted from newest to oldest.
pub fn bootstrapped_pythons() -> Option<Vec<PathBuf>> {
// Current dir is `<project root>/crates/uv`.
let project_root = std::env::current_dir()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap()
.to_path_buf();
let bootstrap_dir = if let Some(bootstrap_dir) = env::var_os("UV_BOOTSTRAP_DIR") {
let bootstrap_dir = PathBuf::from(bootstrap_dir);
if bootstrap_dir.is_absolute() {
bootstrap_dir
} else {
// cargo test changes directory to the test crate, but doesn't tell us from where the user is running the
// tests. We'll assume that it's the project root.
project_root.join(bootstrap_dir)
}
} else {
project_root.join("bin")
};
let bootstrapped_pythons = bootstrap_dir.join("versions");
let Ok(bootstrapped_pythons) = fs_err::read_dir(bootstrapped_pythons) else {
return None;
};
let mut bootstrapped_pythons: Vec<PathBuf> = bootstrapped_pythons
.map(Result::unwrap)
.filter(|entry| entry.metadata().unwrap().is_dir())
.map(|entry| {
if cfg!(unix) {
entry.path().join("install").join("bin")
} else if cfg!(windows) {
entry.path().join("install")
} else {
unimplemented!("Only Windows and Unix are supported")
}
})
.collect();
bootstrapped_pythons.sort();
// Prefer the most recent patch version.
bootstrapped_pythons.reverse();
Some(bootstrapped_pythons)
}
/// Create a virtual environment named `.venv` in a temporary directory with the given
/// Python version. Expected format for `python` is "python<version>".
/// Python version. Expected format for `python` is "<version>".
pub fn create_venv<Parent: assert_fs::prelude::PathChild + AsRef<std::path::Path>>(
temp_dir: &Parent,
cache_dir: &assert_fs::TempDir,
python: &str,
) -> PathBuf {
let python = if let Some(bootstrapped_pythons) = bootstrapped_pythons() {
bootstrapped_pythons
.into_iter()
// Good enough since we control the directory
.find(|path| path.to_str().unwrap().contains(&format!("@{python}")))
.expect("Missing python bootstrap version")
.join(if cfg!(unix) {
"python3"
} else if cfg!(windows) {
"python.exe"
} else {
unimplemented!("Only Windows and Unix are supported")
})
} else {
PathBuf::from(python)
};
let python = toolchains_for_version(
&PythonVersion::from_str(python).expect("Tests should use a valid Python version"),
)
.expect("Tests are run on a supported platform")
.first()
.map(uv_toolchain::Toolchain::executable)
// We'll search for the requested Python on the PATH if it's not found in the toolchain versions
// We hack this into a `PathBuf` to satisfy the compiler but it's just a string
.unwrap_or(PathBuf::from(python));
let venv = temp_dir.child(".venv");
Command::new(get_bin())
.arg("venv")
@ -414,34 +354,48 @@ pub fn get_bin() -> PathBuf {
}
/// Create a `PATH` with the requested Python versions available in order.
pub fn create_bin_with_executables(
///
/// Generally this should be used with `UV_TEST_PYTHON_PATH`.
pub fn python_path_with_versions(
temp_dir: &assert_fs::TempDir,
python_versions: &[&str],
) -> anyhow::Result<OsString> {
if let Some(bootstrapped_pythons) = bootstrapped_pythons() {
let selected_pythons = python_versions.iter().flat_map(|python_version| {
bootstrapped_pythons.iter().filter(move |path| {
// Good enough since we control the directory
path.to_str()
.unwrap()
.contains(&format!("@{python_version}"))
let cache = Cache::from_path(temp_dir.child("cache").to_path_buf())?;
let selected_pythons = python_versions
.iter()
.flat_map(|python_version| {
let inner = toolchains_for_version(
&PythonVersion::from_str(python_version)
.expect("Tests should use a valid Python version"),
)
.expect("Tests are run on a supported platform")
.iter()
.map(|toolchain| {
toolchain
.executable()
.parent()
.expect("Executables must exist in a directory")
.to_path_buf()
})
});
return Ok(env::join_paths(selected_pythons)?);
}
.collect::<Vec<_>>();
if inner.is_empty() {
// Fall back to a system lookup if we failed to find one in the toolchain directory
if let Some(interpreter) = find_requested_python(python_version, &cache).unwrap() {
vec![interpreter
.sys_executable()
.parent()
.expect("Python executable should always be in a directory")
.to_path_buf()]
} else {
panic!("Could not find Python {python_version} for test");
}
} else {
inner
}
})
.collect::<Vec<_>>();
let bin = temp_dir.child("bin");
fs_err::create_dir(&bin)?;
for &request in python_versions {
let interpreter = find_requested_python(request, &Cache::temp().unwrap())?
.ok_or(uv_interpreter::Error::NoSuchPython(request.to_string()))?;
let name = interpreter
.sys_executable()
.file_name()
.expect("Discovered executable must have a filename");
symlink_file(interpreter.sys_executable(), bin.child(name))?;
}
Ok(bin.canonicalize()?.into())
Ok(env::join_paths(selected_pythons)?)
}
/// Execute the command and format its output status, stdout and stderr into a snapshot string.


@ -13,14 +13,14 @@ use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileWriteStr, PathChild};
use predicates::prelude::predicate;
use common::{create_bin_with_executables, get_bin, uv_snapshot, TestContext};
use common::{get_bin, python_path_with_versions, uv_snapshot, TestContext};
mod common;
/// Provision python binaries and return a `pip compile` command with options shared across all scenarios.
fn command(context: &TestContext, python_versions: &[&str]) -> Command {
let bin = create_bin_with_executables(&context.temp_dir, python_versions)
.expect("Failed to create bin dir");
let python_path = python_path_with_versions(&context.temp_dir, python_versions)
.expect("Failed to create Python test path");
let mut command = Command::new(get_bin());
command
.arg("pip")
@ -34,7 +34,7 @@ fn command(context: &TestContext, python_versions: &[&str]) -> Command {
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.env("UV_TEST_PYTHON_PATH", bin)
.env("UV_TEST_PYTHON_PATH", python_path)
.current_dir(&context.temp_dir);
if cfg!(all(windows, debug_assertions)) {


@ -13,7 +13,7 @@ use indoc::indoc;
use predicates::Predicate;
use url::Url;
use common::{create_bin_with_executables, create_venv, uv_snapshot, venv_to_interpreter};
use common::{create_venv, python_path_with_versions, uv_snapshot, venv_to_interpreter};
use uv_fs::Simplified;
use crate::common::{copy_dir_all, get_bin, TestContext};
@ -338,8 +338,8 @@ fn link() -> Result<()> {
.success();
let venv2 = context.temp_dir.child(".venv2");
let bin = create_bin_with_executables(&context.temp_dir, &["3.12"])
.expect("Failed to create bin dir");
let python_path = python_path_with_versions(&context.temp_dir, &["3.12"])
.expect("Failed to create Python test path");
Command::new(get_bin())
.arg("venv")
.arg(venv2.as_os_str())
@ -347,7 +347,7 @@ fn link() -> Result<()> {
.arg(context.cache_dir.path())
.arg("--python")
.arg("3.12")
.env("UV_TEST_PYTHON_PATH", bin)
.env("UV_TEST_PYTHON_PATH", python_path)
.current_dir(&context.temp_dir)
.assert()
.success();


@ -9,11 +9,9 @@ use assert_fs::fixture::ChildPath;
use assert_fs::prelude::*;
use fs_err::PathExt;
use uv_fs::Simplified;
use uv_interpreter::PythonVersion;
use uv_toolchain::PythonVersion;
use crate::common::{
create_bin_with_executables, get_bin, uv_snapshot, TestContext, EXCLUDE_NEWER,
};
use crate::common::{get_bin, python_path_with_versions, uv_snapshot, TestContext, EXCLUDE_NEWER};
mod common;
@ -21,15 +19,15 @@ struct VenvTestContext {
cache_dir: assert_fs::TempDir,
temp_dir: assert_fs::TempDir,
venv: ChildPath,
bin: OsString,
python_path: OsString,
python_versions: Vec<PythonVersion>,
}
impl VenvTestContext {
fn new(python_versions: &[&str]) -> Self {
let temp_dir = assert_fs::TempDir::new().unwrap();
let bin = create_bin_with_executables(&temp_dir, python_versions)
.expect("Failed to create bin dir");
let python_path = python_path_with_versions(&temp_dir, python_versions)
.expect("Failed to create Python test path");
let venv = temp_dir.child(".venv");
let python_versions = python_versions
.iter()
@ -41,7 +39,7 @@ impl VenvTestContext {
cache_dir: assert_fs::TempDir::new().unwrap(),
temp_dir,
venv,
bin,
python_path,
python_versions,
}
}
@ -54,7 +52,7 @@ impl VenvTestContext {
.arg(self.cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("UV_TEST_PYTHON_PATH", self.bin.clone())
.env("UV_TEST_PYTHON_PATH", self.python_path.clone())
.current_dir(self.temp_dir.path());
command
}
@ -397,9 +395,9 @@ fn windows_shims() -> Result<()> {
let context = VenvTestContext::new(&["3.9", "3.8"]);
let shim_path = context.temp_dir.child("shim");
let py38 = std::env::split_paths(&context.bin)
let py38 = std::env::split_paths(&context.python_path)
.last()
.expect("create_bin_with_executables to set up the python versions");
.expect("python_path_with_versions to set up the python versions");
// We want 3.8 and the first version should be 3.9.
// Picking the last is necessary to prove that shims work because the python version selects
// the python version from the first path segment by default, so we take the last to prove it's not
@ -417,7 +415,7 @@ fn windows_shims() -> Result<()> {
uv_snapshot!(context.filters(), context.venv_command()
.arg(context.venv.as_os_str())
.arg("--clear")
.env("UV_TEST_PYTHON_PATH", format!("{};{}", shim_path.display(), context.bin.simplified_display())), @r###"
.env("UV_TEST_PYTHON_PATH", format!("{};{}", shim_path.display(), context.python_path.simplified_display())), @r###"
success: true
exit_code: 0
----- stdout -----