Mirror of https://github.com/astral-sh/uv.git, synced 2025-08-03 18:38:21 +00:00
Modify install plan to support all distribution types (#581)
This PR adds caching support for built wheels in the installer. Specifically, the `RegistryWheelIndex` now indexes both downloaded and built wheels (from registries), and we have a new `BuiltWheelIndex` that takes a subdirectory and returns the "best-matching" compatible wheel. Closes #570.
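As a rough illustration of the lookup flow this enables, here is a minimal sketch of how the two indexes are queried, based on the types introduced in the diff below. The package name, cache directory, and error handling here are illustrative assumptions, not part of this change:

use std::str::FromStr;

use platform_tags::Tags;
use puffin_cache::Cache;
use puffin_distribution::{BuiltWheelIndex, RegistryWheelIndex};
use puffin_normalize::PackageName;
use pypi_types::IndexUrls;

// Hypothetical helper: look up cached wheels for a requirement.
fn lookup_cached(cache: &Cache, tags: &Tags, index_urls: &IndexUrls) -> anyhow::Result<()> {
    // Registry wheels (downloaded *and* built from source distributions) are indexed
    // eagerly, keyed by package name and version.
    let registry = RegistryWheelIndex::from_directory(cache, tags, index_urls);
    let name = PackageName::from_str("tqdm")?;
    if let Some((version, _dist)) = registry.by_name(&name).next() {
        println!("best cached registry wheel for tqdm: {version}");
    }

    // Built wheels for a single source distribution live under one cache subdirectory;
    // `BuiltWheelIndex` scans that subdirectory and returns the newest wheel that is
    // compatible with the current platform tags.
    let built = BuiltWheelIndex::new("built-wheels-v0/pypi/django-allauth-0.51.0.tar.gz", tags);
    if let Some(wheel) = built.find() {
        println!("compatible built wheel cached at {}", wheel.path.display());
    }

    Ok(())
}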
Parent: edaeb9b0e8
Commit: aa065f5c97
15 changed files with 444 additions and 147 deletions
Cargo.lock (generated, 1 change)
@@ -2461,6 +2461,7 @@ dependencies = [
 "futures",
 "fxhash",
 "install-wheel-rs",
 "pep440_rs 0.3.12",
 "platform-tags",
 "puffin-cache",
 "puffin-client",
@@ -134,9 +134,17 @@ impl CachedDirectUrlDist {
    }
}

impl CachedRegistryDist {
#[derive(Debug, Clone)]
pub struct CachedWheel {
    /// The filename of the wheel.
    pub filename: WheelFilename,
    /// The path to the wheel.
    pub path: PathBuf,
}

impl CachedWheel {
    /// Try to parse a distribution from a cached directory name (like `typing-extensions-4.8.0-py3-none-any`).
    pub fn try_from_path(path: &Path) -> Result<Option<Self>> {
    pub fn from_path(path: &Path) -> Result<Option<Self>> {
        let Some(file_name) = path.file_name() else {
            return Ok(None);
        };

@@ -154,4 +162,21 @@ impl CachedRegistryDist {

        Ok(Some(Self { filename, path }))
    }

    /// Convert a [`CachedWheel`] into a [`CachedRegistryDist`].
    pub fn into_registry_dist(self) -> CachedRegistryDist {
        CachedRegistryDist {
            filename: self.filename,
            path: self.path,
        }
    }

    /// Convert a [`CachedWheel`] into a [`CachedDirectUrlDist`].
    pub fn into_url_dist(self, url: Url) -> CachedDirectUrlDist {
        CachedDirectUrlDist {
            filename: self.filename,
            url,
            path: self.path,
        }
    }
}
@@ -88,6 +88,12 @@ fn get_subdirectory(url: &Url) -> Option<PathBuf> {
    Some(PathBuf::from(subdirectory))
}

/// Return the Git reference of the given URL, if it exists.
pub fn git_reference(url: &Url) -> Result<Option<String>, Error> {
    let DirectGitUrl { url, .. } = DirectGitUrl::try_from(url)?;
    Ok(url.reference().map(ToString::to_string))
}

impl TryFrom<&Url> for DirectUrl {
    type Error = Error;

@@ -14,6 +14,7 @@ pub(crate) const INSTA_FILTERS: &[(&str, &str)] = &[
    (r"--cache-dir .*", "--cache-dir [CACHE_DIR]"),
];

/// Create a virtual environment named `.venv` in a temporary directory.
pub(crate) fn create_venv_py312(temp_dir: &TempDir, cache_dir: &TempDir) -> PathBuf {
    let venv = temp_dir.child(".venv");
    Command::new(get_cargo_bin(BIN_NAME))
@@ -987,9 +987,6 @@ fn install_url_source_dist_cached() -> Result<()> {
        ----- stdout -----

        ----- stderr -----
        Resolved 1 package in [TIME]
        Downloaded 1 package in [TIME]
        Unzipped 1 package in [TIME]
        Installed 1 package in [TIME]
         + tqdm @ https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz
        "###);
@@ -1056,9 +1053,6 @@ fn install_git_source_dist_cached() -> Result<()> {
        ----- stdout -----

        ----- stderr -----
        Resolved 1 package in [TIME]
        Downloaded 1 package in [TIME]
        Unzipped 1 package in [TIME]
        Installed 1 package in [TIME]
         + werkzeug @ git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74
        "###);
@@ -1124,9 +1118,6 @@ fn install_registry_source_dist_cached() -> Result<()> {
        ----- stdout -----

        ----- stderr -----
        Resolved 1 package in [TIME]
        Downloaded 1 package in [TIME]
        Unzipped 1 package in [TIME]
        Installed 1 package in [TIME]
         + future==0.18.3
        "###);
@@ -1201,9 +1192,6 @@ fn install_path_source_dist_cached() -> Result<()> {
        ----- stdout -----

        ----- stderr -----
        Resolved 1 package in [TIME]
        Downloaded 1 package in [TIME]
        Unzipped 1 package in [TIME]
        Installed 1 package in [TIME]
         + wheel @ file://[TEMP_DIR]/wheel-0.42.0.tar.gz
        "###);
@@ -1287,3 +1275,68 @@ fn install_path_built_dist_cached() -> Result<()> {

    Ok(())
}

/// Check that we show the right messages on cached, direct URL built distribution installs.
#[test]
fn install_url_built_dist_cached() -> Result<()> {
    let temp_dir = assert_fs::TempDir::new()?;
    let cache_dir = assert_fs::TempDir::new()?;
    let venv = create_venv_py312(&temp_dir, &cache_dir);

    let requirements_txt = temp_dir.child("requirements.txt");
    requirements_txt.touch()?;
    requirements_txt.write_str("tqdm @ https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl")?;

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .arg("pip-sync")
            .arg("requirements.txt")
            .arg("--cache-dir")
            .arg(cache_dir.path())
            .env("VIRTUAL_ENV", venv.as_os_str())
            .current_dir(&temp_dir), @r###"
        success: true
        exit_code: 0
        ----- stdout -----

        ----- stderr -----
        Resolved 1 package in [TIME]
        Downloaded 1 package in [TIME]
        Unzipped 1 package in [TIME]
        Installed 1 package in [TIME]
         + tqdm @ https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl
        "###);
    });

    check_command(&venv, "import tqdm", &temp_dir);

    // Re-run the installation in a new virtual environment.
    let parent = assert_fs::TempDir::new()?;
    let venv = create_venv_py312(&parent, &cache_dir);

    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec()
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .arg("pip-sync")
            .arg("requirements.txt")
            .arg("--cache-dir")
            .arg(cache_dir.path())
            .env("VIRTUAL_ENV", venv.as_os_str())
            .current_dir(&temp_dir), @r###"
        success: true
        exit_code: 0
        ----- stdout -----

        ----- stderr -----
        Installed 1 package in [TIME]
         + tqdm @ https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl
        "###);
    });

    check_command(&venv, "import tqdm", &temp_dir);

    Ok(())
}
@@ -6,9 +6,9 @@ info:
    - pip-sync
    - requirements.txt
    - "--cache-dir"
    - /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpD0MMCB
    - /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpqdCeIU
  env:
    VIRTUAL_ENV: /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpSwXmTa/.venv
    VIRTUAL_ENV: /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpWkiF67/.venv
---
success: true
exit_code: 0
@@ -6,9 +6,9 @@ info:
    - pip-sync
    - requirements.txt
    - "--cache-dir"
    - /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpOaWXtJ
    - /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmp9AMtBI
  env:
    VIRTUAL_ENV: /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpl1QcDF/.venv
    VIRTUAL_ENV: /var/folders/nt/6gf2v7_s3k13zq_t3944rwz40000gn/T/.tmpA2E3PE/.venv
---
success: true
exit_code: 0
@@ -16,6 +16,7 @@ workspace = true
distribution-filename = { path = "../distribution-filename", features = ["serde"] }
distribution-types = { path = "../distribution-types" }
install-wheel-rs = { path = "../install-wheel-rs" }
pep440_rs = { path = "../pep440-rs" }
platform-tags = { path = "../platform-tags" }
puffin-cache = { path = "../puffin-cache" }
puffin-client = { path = "../puffin-client" }
crates/puffin-distribution/src/index/built_wheel_index.rs (new file, 91 lines)
@@ -0,0 +1,91 @@
use std::path::PathBuf;

use fs_err as fs;
use tracing::warn;

use distribution_types::CachedWheel;
use platform_tags::Tags;

use crate::index::iter_directories;

/// A local index of built distributions for a specific source distribution.
#[derive(Debug)]
pub struct BuiltWheelIndex<'a> {
    directory: PathBuf,
    tags: &'a Tags,
}

impl<'a> BuiltWheelIndex<'a> {
    /// Create a new index of built distributions.
    ///
    /// The `directory` should be the directory containing the built distributions for a specific
    /// source distribution. For example, given the built wheel cache structure:
    /// ```text
    /// built-wheels-v0/
    /// └── pypi
    ///     └── django-allauth-0.51.0.tar.gz
    ///         ├── django_allauth-0.51.0-py3-none-any.whl
    ///         └── metadata.json
    /// ```
    ///
    /// The `directory` should be `built-wheels-v0/pypi/django-allauth-0.51.0.tar.gz`.
    pub fn new(directory: impl Into<PathBuf>, tags: &'a Tags) -> Self {
        Self {
            directory: directory.into(),
            tags,
        }
    }

    /// Find the "best" distribution in the index.
    ///
    /// This lookup prefers newer versions over older versions, and aims to maximize compatibility
    /// with the target platform.
    pub fn find(&self) -> Option<CachedWheel> {
        let mut candidate: Option<CachedWheel> = None;

        for subdir in iter_directories(self.directory.read_dir().ok()?) {
            match CachedWheel::from_path(&subdir) {
                Ok(None) => {}
                Ok(Some(dist_info)) => {
                    // Pick the wheel with the highest priority
                    let compatibility = dist_info.filename.compatibility(self.tags);

                    // Only consider wheels that are compatible with our tags.
                    if compatibility.is_none() {
                        continue;
                    }

                    // TODO(charlie): Consider taking into account the freshness checks that we
                    // encode when building source distributions (e.g., timestamps). For now, we
                    // assume that distributions are immutable when installing (i.e., in this
                    // index).
                    if let Some(existing) = candidate.as_ref() {
                        // Override if the wheel is newer, or "more" compatible.
                        if dist_info.filename.version > existing.filename.version
                            || compatibility > existing.filename.compatibility(self.tags)
                        {
                            candidate = Some(dist_info);
                        }
                    } else {
                        candidate = Some(dist_info);
                    }
                }
                Err(err) => {
                    warn!(
                        "Invalid cache entry at {}, removing. {err}",
                        subdir.display()
                    );
                    let result = fs::remove_dir_all(&subdir);
                    if let Err(err) = result {
                        warn!(
                            "Failed to remove invalid cache entry at {}: {err}",
                            subdir.display()
                        );
                    }
                }
            }
        }

        candidate
    }
}
crates/puffin-distribution/src/index/mod.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
use std::path::PathBuf;

use tracing::warn;

pub use built_wheel_index::BuiltWheelIndex;
pub use registry_wheel_index::RegistryWheelIndex;

mod built_wheel_index;
mod registry_wheel_index;

/// Iterate over the subdirectories of a directory.
fn iter_directories(read_dir: std::fs::ReadDir) -> impl Iterator<Item = PathBuf> {
    read_dir
        .filter_map(|entry| match entry {
            Ok(entry) => Some(entry),
            Err(err) => {
                warn!("Failed to read entry of cache: {}", err);
                None
            }
        })
        .filter(|entry| {
            entry
                .file_type()
                .map_or(false, |file_type| file_type.is_dir())
        })
        .map(|entry| entry.path())
}
crates/puffin-distribution/src/index/registry_wheel_index.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use std::collections::BTreeMap;
use std::path::Path;

use fs_err as fs;
use fxhash::FxHashMap;
use tracing::warn;

use distribution_types::{CachedRegistryDist, CachedWheel, Metadata};
use pep440_rs::Version;
use platform_tags::Tags;
use puffin_cache::{Cache, CacheBucket, WheelCache};
use puffin_normalize::PackageName;
use pypi_types::IndexUrls;

use crate::index::iter_directories;

/// A local index of distributions that originate from a registry, like `PyPI`.
#[derive(Debug, Default)]
pub struct RegistryWheelIndex(FxHashMap<PackageName, BTreeMap<Version, CachedRegistryDist>>);

impl RegistryWheelIndex {
    /// Build an index of cached distributions from a directory.
    pub fn from_directory(cache: &Cache, tags: &Tags, index_urls: &IndexUrls) -> Self {
        let mut index = Self::default();

        for index_url in index_urls {
            // Index all the wheels that were downloaded directly from the registry.
            // TODO(charlie): Shard the cache by package name, and do this lazily.
            let wheel_dir = cache
                .bucket(CacheBucket::Wheels)
                .join(WheelCache::Index(index_url).wheel_dir());

            index.add_directory(wheel_dir, tags);

            // Index all the built wheels, created by downloading and building source distributions
            // from the registry.
            // TODO(charlie): Shard the cache by package name, and do this lazily.
            let built_wheel_dir = cache
                .bucket(CacheBucket::BuiltWheels)
                .join(WheelCache::Index(index_url).wheel_dir());

            let Ok(read_dir) = built_wheel_dir.read_dir() else {
                continue;
            };
            for subdir in iter_directories(read_dir) {
                index.add_directory(subdir, tags);
            }
        }

        index
    }

    /// Returns a distribution from the index, if it exists.
    pub fn by_name(
        &self,
        name: &PackageName,
    ) -> impl Iterator<Item = (&Version, &CachedRegistryDist)> {
        // Using static to extend the lifetime
        static DEFAULT_MAP: BTreeMap<Version, CachedRegistryDist> = BTreeMap::new();
        self.0.get(name).unwrap_or(&DEFAULT_MAP).iter().rev()
    }

    /// Add the wheels in a given directory to the index.
    ///
    /// Each subdirectory in the given path is expected to be that of an unzipped wheel.
    fn add_directory(&mut self, path: impl AsRef<Path>, tags: &Tags) {
        let Ok(read_dir) = path.as_ref().read_dir() else {
            return;
        };

        for wheel_dir in iter_directories(read_dir) {
            match CachedWheel::from_path(&wheel_dir) {
                Ok(None) => {}
                Ok(Some(dist_info)) => {
                    let dist_info = dist_info.into_registry_dist();

                    // Pick the wheel with the highest priority
                    let compatibility = dist_info.filename.compatibility(tags);
                    if let Some(existing) = self
                        .0
                        .get_mut(dist_info.name())
                        .and_then(|package| package.get_mut(&dist_info.filename.version))
                    {
                        // Override if we have better compatibility
                        if compatibility > existing.filename.compatibility(tags) {
                            *existing = dist_info;
                        }
                    } else if compatibility.is_some() {
                        self.0
                            .entry(dist_info.name().clone())
                            .or_default()
                            .insert(dist_info.filename.version.clone(), dist_info);
                    }
                }
                Err(err) => {
                    warn!(
                        "Invalid cache entry at {}, removing. {err}",
                        wheel_dir.display()
                    );
                    let result = fs::remove_dir_all(&wheel_dir);
                    if let Err(err) = result {
                        warn!(
                            "Failed to remove invalid cache entry at {}: {err}",
                            wheel_dir.display()
                        );
                    }
                }
            }
        }
    }
}
@@ -1,5 +1,6 @@
pub use distribution_database::{DistributionDatabase, DistributionDatabaseError};
pub use download::{DiskWheel, Download, InMemoryWheel, LocalWheel, SourceDistDownload};
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
pub use reporter::Reporter;
pub use source_dist::{SourceDistCachedBuilder, SourceDistError};
pub use unzip::Unzip;

@@ -7,6 +8,7 @@ pub use unzip::Unzip;
mod distribution_database;
mod download;
mod error;
mod index;
mod locks;
mod reporter;
mod source_dist;
@@ -1,13 +1,11 @@
pub use installer::{Installer, Reporter as InstallReporter};
pub use plan::InstallPlan;
pub use registry_index::RegistryIndex;
pub use site_packages::SitePackages;
pub use uninstall::uninstall;
pub use unzipper::{Reporter as UnzipReporter, Unzipper};

mod installer;
mod plan;
mod registry_index;
mod site_packages;
mod uninstall;
mod unzipper;
@@ -1,18 +1,19 @@
use std::str::FromStr;

use anyhow::{bail, Context, Result};
use anyhow::{Context, Result};
use tracing::debug;

use distribution_filename::WheelFilename;
use distribution_types::direct_url::DirectUrl;
use distribution_types::{CachedDirectUrlDist, CachedDist, InstalledDist, RemoteSource};
use distribution_types::direct_url::{git_reference, DirectUrl};
use distribution_types::{
    BuiltDist, CachedDirectUrlDist, CachedDist, Dist, InstalledDist, Metadata, RemoteSource,
    SourceDist,
};
use pep508_rs::{Requirement, VersionOrUrl};
use platform_tags::Tags;
use puffin_cache::{Cache, CacheBucket, WheelCache};
use puffin_distribution::{BuiltWheelIndex, RegistryWheelIndex};
use puffin_interpreter::Virtualenv;
use pypi_types::IndexUrls;

use crate::{RegistryIndex, SitePackages};
use crate::SitePackages;

#[derive(Debug, Default)]
pub struct InstallPlan {
@@ -44,7 +45,7 @@ impl InstallPlan {
            SitePackages::try_from_executable(venv).context("Failed to list installed packages")?;

        // Index all the already-downloaded wheels in the cache.
        let registry_index = RegistryIndex::try_from_directory(cache, tags, index_urls);
        let registry_index = RegistryWheelIndex::from_directory(cache, tags, index_urls);

        let mut local = vec![];
        let mut remote = vec![];
@@ -88,6 +89,7 @@ impl InstallPlan {
            // Identify any locally-available distributions that satisfy the requirement.
            match requirement.version_or_url.as_ref() {
                None => {
                    // TODO(charlie): This doesn't respect built wheels.
                    if let Some((_version, distribution)) =
                        registry_index.by_name(&requirement.name).next()
                    {
@@ -110,37 +112,107 @@
                    }
                }
                Some(VersionOrUrl::Url(url)) => {
                    // TODO(konstin): Add source dist url support. It's more tricky since we don't
                    // know yet whether source dist is fresh in the cache.
                    if let Ok(filename) = url
                        .filename()
                        .and_then(|disk_filename| Ok(WheelFilename::from_str(disk_filename)?))
                    {
                        if requirement.name != filename.name {
                            bail!(
                                "Given name `{}` does not match url name `{}`",
                                requirement.name,
                                url
                            );
                    match Dist::from_url(requirement.name.clone(), url.clone())? {
                        Dist::Built(BuiltDist::Registry(_wheel)) => {
                            // Nothing to do.
                        }

                        let cache_entry = cache.entry(
                            CacheBucket::Wheels,
                            WheelCache::Url(url).wheel_dir(),
                            filename.stem(),
                        );

                        // Ignore zipped wheels, which represent intermediary cached artifacts.
                        if cache_entry.path().is_dir() {
                            let cached_dist = CachedDirectUrlDist::from_url(
                                filename,
                                url.clone(),
                                cache_entry.path(),
                        Dist::Source(SourceDist::Registry(_)) => {
                            // Nothing to do.
                        }
                        Dist::Built(BuiltDist::DirectUrl(wheel)) => {
                            // Find the exact wheel from the cache, since we know the filename in
                            // advance.
                            let cache_entry = cache.entry(
                                CacheBucket::Wheels,
                                WheelCache::Url(&wheel.url).wheel_dir(),
                                wheel.filename.stem(),
                            );

                                debug!("URL wheel requirement already cached: {cached_dist}");
                                local.push(CachedDist::Url(cached_dist.clone()));
                                continue;
                            if cache_entry.path().exists() {
                                let cached_dist = CachedDirectUrlDist::from_url(
                                    wheel.filename,
                                    wheel.url,
                                    cache_entry.path(),
                                );

                                debug!("URL wheel requirement already cached: {cached_dist}");
                                local.push(CachedDist::Url(cached_dist.clone()));
                                continue;
                            }
                        }
                        Dist::Built(BuiltDist::Path(wheel)) => {
                            // Find the exact wheel from the cache, since we know the filename in
                            // advance.
                            let cache_entry = cache.entry(
                                CacheBucket::Wheels,
                                WheelCache::Url(&wheel.url).wheel_dir(),
                                wheel.filename.stem(),
                            );

                            if cache_entry.path().exists() {
                                let cached_dist = CachedDirectUrlDist::from_url(
                                    wheel.filename,
                                    wheel.url,
                                    cache_entry.path(),
                                );

                                debug!("Path wheel requirement already cached: {cached_dist}");
                                local.push(CachedDist::Url(cached_dist.clone()));
                                continue;
                            }
                        }
                        Dist::Source(SourceDist::DirectUrl(sdist)) => {
                            // Find the most-compatible wheel from the cache, since we don't know
                            // the filename in advance.
                            let cache_entry = cache.entry(
                                CacheBucket::BuiltWheels,
                                WheelCache::Url(&sdist.url).wheel_dir(),
                                sdist.filename()?.to_string(),
                            );
                            let index = BuiltWheelIndex::new(cache_entry.path(), tags);

                            if let Some(wheel) = index.find() {
                                let cached_dist = wheel.into_url_dist(url.clone());
                                debug!("URL source requirement already cached: {cached_dist}");
                                local.push(CachedDist::Url(cached_dist.clone()));
                                continue;
                            }
                        }
                        Dist::Source(SourceDist::Path(sdist)) => {
                            // Find the most-compatible wheel from the cache, since we don't know
                            // the filename in advance.
                            let cache_entry = cache.entry(
                                CacheBucket::BuiltWheels,
                                WheelCache::Path(&sdist.url).wheel_dir(),
                                sdist.name().to_string(),
                            );
                            let index = BuiltWheelIndex::new(cache_entry.path(), tags);

                            if let Some(wheel) = index.find() {
                                let cached_dist = wheel.into_url_dist(url.clone());
                                debug!("Path source requirement already cached: {cached_dist}");
                                local.push(CachedDist::Url(cached_dist.clone()));
                                continue;
                            }
                        }
                        Dist::Source(SourceDist::Git(sdist)) => {
                            // Find the most-compatible wheel from the cache, since we don't know
                            // the filename in advance.
                            if let Ok(Some(reference)) = git_reference(&sdist.url) {
                                let cache_entry = cache.entry(
                                    CacheBucket::BuiltWheels,
                                    WheelCache::Git(&sdist.url).wheel_dir(),
                                    reference.to_string(),
                                );
                                let index = BuiltWheelIndex::new(cache_entry.path(), tags);

                                if let Some(wheel) = index.find() {
                                    let cached_dist = wheel.into_url_dist(url.clone());
                                    debug!("Git source requirement already cached: {cached_dist}");
                                    local.push(CachedDist::Url(cached_dist.clone()));
                                    continue;
                                }
                            }
                        }
                    }
                }
@@ -1,91 +0,0 @@
use std::collections::{BTreeMap, HashMap};

use fs_err as fs;
use tracing::warn;

use distribution_types::{CachedRegistryDist, Metadata};
use pep440_rs::Version;
use platform_tags::Tags;
use puffin_cache::{Cache, CacheBucket, WheelCache};
use puffin_normalize::PackageName;
use pypi_types::IndexUrls;

/// A local index of distributions that originate from a registry, like `PyPI`.
#[derive(Debug, Default)]
pub struct RegistryIndex(HashMap<PackageName, BTreeMap<Version, CachedRegistryDist>>);

impl RegistryIndex {
    /// Build an index of cached distributions from a directory.
    pub fn try_from_directory(cache: &Cache, tags: &Tags, index_urls: &IndexUrls) -> Self {
        let mut index: HashMap<PackageName, BTreeMap<Version, CachedRegistryDist>> = HashMap::new();

        for index_url in index_urls {
            let wheel_dir = cache
                .bucket(CacheBucket::Wheels)
                .join(WheelCache::Index(index_url).wheel_dir());

            let Ok(dir) = wheel_dir.read_dir() else {
                continue;
            };

            for entry in dir {
                let path = match entry.map(|entry| entry.path()) {
                    Ok(path) => path,
                    Err(err) => {
                        warn!(
                            "Failed to read entry of cache at {}: {}",
                            cache.root().display(),
                            err
                        );
                        continue;
                    }
                };

                match CachedRegistryDist::try_from_path(&path) {
                    Ok(None) => {}
                    Ok(Some(dist_info)) => {
                        // Pick the wheel with the highest priority
                        let compatibility = dist_info.filename.compatibility(tags);
                        if let Some(existing) = index
                            .get_mut(dist_info.name())
                            .and_then(|package| package.get_mut(&dist_info.filename.version))
                        {
                            // Override if we have better compatibility
                            if compatibility > existing.filename.compatibility(tags) {
                                *existing = dist_info;
                            }
                        } else if compatibility.is_some() {
                            index
                                .entry(dist_info.name().clone())
                                .or_default()
                                .insert(dist_info.filename.version.clone(), dist_info);
                        }
                    }
                    Err(err) => {
                        warn!("Invalid cache entry at {}, removing. {err}", path.display());
                        let result = fs::remove_dir_all(&path);
                        if let Err(err) = result {
                            warn!(
                                "Failed to remove invalid cache entry at {}: {err}",
                                path.display()
                            );
                        }
                    }
                }
            }
        }

        Self(index)
    }

    /// Returns a distribution from the index, if it exists.
    pub fn by_name(
        &self,
        name: &PackageName,
    ) -> impl Iterator<Item = (&Version, &CachedRegistryDist)> {
        // Using static to extend the lifetime
        static DEFAULT_MAP: BTreeMap<Version, CachedRegistryDist> = BTreeMap::new();
        // We should only query this
        self.0.get(name).unwrap_or(&DEFAULT_MAP).iter().rev()
    }
}