Enable PEP 517 builds for unnamed requirements (#2600)

## Summary

This PR enables the source distribution database to be used with unnamed
requirements (i.e., URLs without a package name). The (significant)
upside here is that we can now use PEP 517 hooks to resolve unnamed
requirement metadata _and_ reuse any computation in the cache.

The changes to `crates/uv-distribution/src/source/mod.rs` are quite
extensive, but mostly mechanical. The core idea is that we introduce a
new `BuildableSource` abstraction, which can either be a distribution,
or an unnamed URL:

```rust
/// A reference to a source that can be built into a built distribution.
///
/// This can either be a distribution (e.g., a package on a registry) or a direct URL.
///
/// Distributions can _also_ point to URLs in lieu of a registry; however, the primary distinction
/// here is that a distribution will always include a package name, while a URL will not.
#[derive(Debug, Clone, Copy)]
pub enum BuildableSource<'a> {
    Dist(&'a SourceDist),
    Url(SourceUrl<'a>),
}
```

All the methods on the source distribution database now accept
`BuildableSource`. `BuildableSource` has a `name()` method, but it
returns `Option<&PackageName>`, and everything is required to work with
and without a package name.

The main drawback of this approach (which isn't a terrible one) is that
we can no longer include the package name in the cache. (We do continue
to use the package name for registry-based distributions, since those
always have a name.) The package name was included in the cache route
for two reasons: (1) it's nice for debugging; and (2) we use it to power
`uv cache clean flask`, to identify the entries that are relevant for
Flask.

To solve this, I changed the `uv cache clean` code to look one level
deeper. So, when we want to determine whether to remove the cache entry
for a given URL, we now look into the directory to see if there are any
wheels that match the package name. This isn't as nice, but it does work
(and we have test coverage for it -- all passing).

I also considered removing the package name from the cache routes for
non-registry _wheels_, for consistency... But, it would require a cache
bump, and it didn't feel important enough to merit that.
This commit is contained in:
Charlie Marsh 2024-03-21 22:46:39 -04:00 committed by GitHub
parent 12192dd872
commit 5d7d7dce24
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 473 additions and 318 deletions

2
Cargo.lock generated
View file

@ -4468,6 +4468,8 @@ dependencies = [
"distribution-types",
"fs-err",
"nanoid",
"pypi-types",
"rmp-serde",
"rustc-hash",
"serde",
"tempfile",

View file

@ -0,0 +1,113 @@
use std::path::Path;
use url::Url;
use uv_normalize::PackageName;
use crate::{GitSourceDist, Name, PathSourceDist, SourceDist};
/// A reference to a source that can be built into a built distribution.
///
/// This can either be a distribution (e.g., a package on a registry) or a direct URL.
///
/// Distributions can _also_ point to URLs in lieu of a registry; however, the primary distinction
/// here is that a distribution will always include a package name, while a URL will not.
#[derive(Debug, Clone, Copy)]
pub enum BuildableSource<'a> {
    /// A source distribution, which always carries a package name.
    Dist(&'a SourceDist),
    /// A bare source URL with no associated package name.
    Url(SourceUrl<'a>),
}
impl BuildableSource<'_> {
    /// Return the [`PackageName`] of the source, if available.
    ///
    /// Only the `Dist` variant carries a name; bare URLs yield `None`.
    pub fn name(&self) -> Option<&PackageName> {
        if let Self::Dist(dist) = self {
            Some(dist.name())
        } else {
            None
        }
    }

    /// Return the [`BuildableSource`] as a [`SourceDist`], if it is a distribution.
    pub fn as_dist(&self) -> Option<&SourceDist> {
        match *self {
            Self::Dist(dist) => Some(dist),
            Self::Url(_) => None,
        }
    }
}
impl std::fmt::Display for BuildableSource<'_> {
    /// Delegate formatting to the wrapped distribution or URL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Dist(dist) => std::fmt::Display::fmt(dist, f),
            Self::Url(url) => std::fmt::Display::fmt(url, f),
        }
    }
}
/// A reference to a source distribution defined by a URL.
#[derive(Debug, Clone, Copy)]
pub enum SourceUrl<'a> {
    /// A URL pointing directly at a source archive.
    Direct(DirectSourceUrl<'a>),
    /// A URL pointing at a Git repository.
    Git(GitSourceUrl<'a>),
    /// A URL pointing at a local filesystem path.
    Path(PathSourceUrl<'a>),
}
impl std::fmt::Display for SourceUrl<'_> {
    /// Delegate formatting to the inner URL variant.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Direct(url) => std::fmt::Display::fmt(url, f),
            Self::Git(url) => std::fmt::Display::fmt(url, f),
            Self::Path(url) => std::fmt::Display::fmt(url, f),
        }
    }
}
/// A source distribution identified by a direct URL to an archive.
#[derive(Debug, Clone, Copy)]
pub struct DirectSourceUrl<'a> {
    // The URL of the source archive.
    pub url: &'a Url,
}
impl std::fmt::Display for DirectSourceUrl<'_> {
    /// Format as the underlying URL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.url)
    }
}
/// A source distribution identified by a Git repository URL.
#[derive(Debug, Clone, Copy)]
pub struct GitSourceUrl<'a> {
    // The URL of the Git repository.
    pub url: &'a Url,
}
impl std::fmt::Display for GitSourceUrl<'_> {
    /// Format as the underlying repository URL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.url)
    }
}
impl<'a> From<&'a GitSourceDist> for GitSourceUrl<'a> {
    /// Borrow the URL out of a [`GitSourceDist`] to form a name-less [`GitSourceUrl`].
    // NOTE(review): `&dist.url` appears to rely on deref coercion from the
    // distribution's URL type to `&Url` — confirm against the field's type.
    fn from(dist: &'a GitSourceDist) -> Self {
        Self { url: &dist.url }
    }
}
/// A source distribution identified by a local filesystem path.
#[derive(Debug, Clone, Copy)]
pub struct PathSourceUrl<'a> {
    // The `file://`-style URL for the path.
    pub url: &'a Url,
    // The local path to the source tree or archive.
    pub path: &'a Path,
}
impl std::fmt::Display for PathSourceUrl<'_> {
    /// Format as the underlying URL (not the local path).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.url)
    }
}
impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> {
    /// Borrow the URL and path out of a [`PathSourceDist`] to form a
    /// name-less [`PathSourceUrl`].
    // NOTE(review): `&dist.url` appears to rely on deref coercion from the
    // distribution's URL type to `&Url` — confirm against the field's type.
    fn from(dist: &'a PathSourceDist) -> Self {
        Self {
            url: &dist.url,
            path: &dist.path,
        }
    }
}

View file

@ -45,6 +45,7 @@ use pep508_rs::{Scheme, VerbatimUrl};
use uv_normalize::PackageName;
pub use crate::any::*;
pub use crate::buildable::*;
pub use crate::cached::*;
pub use crate::direct_url::*;
pub use crate::editable::*;
@ -58,6 +59,7 @@ pub use crate::resolution::*;
pub use crate::traits::*;
mod any;
mod buildable;
mod cached;
mod direct_url;
mod editable;

View file

@ -16,6 +16,7 @@ workspace = true
[dependencies]
cache-key = { workspace = true }
distribution-types = { workspace = true }
pypi-types = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
@ -30,3 +31,4 @@ tempfile = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
walkdir = { workspace = true }
rmp-serde = { workspace = true }

View file

@ -12,6 +12,7 @@ use tempfile::{tempdir, TempDir};
use tracing::debug;
use distribution_types::InstalledDist;
use pypi_types::Metadata23;
use uv_fs::directories;
use uv_normalize::PackageName;
@ -590,7 +591,7 @@ pub enum CacheBucket {
impl CacheBucket {
fn to_str(self) -> &'static str {
match self {
Self::BuiltWheels => "built-wheels-v0",
Self::BuiltWheels => "built-wheels-v1",
Self::FlatIndex => "flat-index-v0",
Self::Git => "git-v0",
Self::Interpreter => "interpreter-v0",
@ -604,6 +605,17 @@ impl CacheBucket {
///
/// Returns the number of entries removed from the cache.
fn remove(self, cache: &Cache, name: &PackageName) -> Result<Removal, io::Error> {
/// Returns `true` if the [`Path`] represents a built wheel for the given package.
fn is_match(path: &Path, name: &PackageName) -> bool {
let Ok(metadata) = fs_err::read(path.join("metadata.msgpack")) else {
return false;
};
let Ok(metadata) = rmp_serde::from_slice::<Metadata23>(&metadata) else {
return false;
};
metadata.name == *name
}
let mut summary = Removal::default();
match self {
Self::Wheels => {
@ -637,26 +649,35 @@ impl CacheBucket {
summary += rm_rf(directory.join(name.to_string()))?;
}
// For direct URLs, we expect a directory for every index, followed by a
// directory per package (indexed by name).
// For direct URLs, we expect a directory for every URL, followed by a
// directory per version. To determine whether the URL is relevant, we need to
// search for a wheel matching the package name.
let root = cache.bucket(self).join(WheelCacheKind::Url);
for directory in directories(root) {
summary += rm_rf(directory.join(name.to_string()))?;
for url in directories(root) {
if directories(&url).any(|version| is_match(&version, name)) {
summary += rm_rf(url)?;
}
}
// For local dependencies, we expect a directory for every path, followed by a
// directory per package (indexed by name).
// directory per version. To determine whether the path is relevant, we need to
// search for a wheel matching the package name.
let root = cache.bucket(self).join(WheelCacheKind::Path);
for directory in directories(root) {
summary += rm_rf(directory.join(name.to_string()))?;
for path in directories(root) {
if directories(&path).any(|version| is_match(&version, name)) {
summary += rm_rf(path)?;
}
}
// For Git dependencies, we expect a directory for every repository, followed by a
// directory for every SHA, followed by a directory per package (indexed by name).
// directory for every SHA. To determine whether the SHA is relevant, we need to
// search for a wheel matching the package name.
let root = cache.bucket(self).join(WheelCacheKind::Git);
for directory in directories(root) {
for directory in directories(directory) {
summary += rm_rf(directory.join(name.to_string()))?;
for repository in directories(root) {
for sha in directories(repository) {
if is_match(&sha, name) {
summary += rm_rf(sha)?;
}
}
}
}

View file

@ -5,13 +5,7 @@ use url::Url;
use cache_key::{digest, CanonicalUrl};
use distribution_types::IndexUrl;
#[allow(unused_imports)] // For rustdoc
use crate::CacheBucket;
/// Cache wheels and their metadata, both from remote wheels and built from source distributions.
///
/// Use [`WheelCache::remote_wheel_dir`] for remote wheel metadata caching and
/// [`WheelCache::built_wheel_dir`] for built source distributions metadata caching.
#[derive(Debug, Clone)]
pub enum WheelCache<'a> {
/// Either PyPI or an alternative index, which we key by index URL.
@ -28,7 +22,8 @@ pub enum WheelCache<'a> {
}
impl<'a> WheelCache<'a> {
fn bucket(&self) -> PathBuf {
/// The root directory for a cache bucket.
pub fn root(&self) -> PathBuf {
match self {
WheelCache::Index(IndexUrl::Pypi(_)) => WheelCacheKind::Pypi.root(),
WheelCache::Index(url) => WheelCacheKind::Index
@ -47,14 +42,9 @@ impl<'a> WheelCache<'a> {
}
}
/// Metadata of a remote wheel. See [`CacheBucket::Wheels`]
pub fn remote_wheel_dir(&self, package_name: impl AsRef<Path>) -> PathBuf {
self.bucket().join(package_name)
}
/// Metadata of a built source distribution. See [`CacheBucket::BuiltWheels`]
pub fn built_wheel_dir(&self, filename: impl AsRef<Path>) -> PathBuf {
self.bucket().join(filename)
/// A subdirectory in a bucket for wheels for a specific package.
pub fn wheel_dir(&self, package_name: impl AsRef<Path>) -> PathBuf {
self.root().join(package_name)
}
}

View file

@ -412,7 +412,7 @@ impl RegistryClient {
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Index(index).remote_wheel_dir(filename.name.as_ref()),
WheelCache::Index(index).wheel_dir(filename.name.as_ref()),
format!("{}.msgpack", filename.stem()),
);
let cache_control = match self.connectivity {
@ -464,7 +464,7 @@ impl RegistryClient {
) -> Result<Metadata23, Error> {
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
cache_shard.remote_wheel_dir(filename.name.as_ref()),
cache_shard.wheel_dir(filename.name.as_ref()),
format!("{}.msgpack", filename.stem()),
);
let cache_control = match self.connectivity {

View file

@ -285,20 +285,15 @@ impl<'a> BuildContext for BuildDispatch<'a> {
),
NoBuild::None => {}
NoBuild::Packages(packages) => {
// We can only prevent builds by name for packages with names. For editable
// packages and unnamed requirements, we can't prevent the build.
if let Some(dist) = dist {
// We can only prevent builds by name for packages with names
// which is unknown before build of editable source distributions
if packages.contains(dist.name()) {
bail!(
"Building source distributions for {} is disabled",
dist.name()
);
}
} else {
debug_assert!(
matches!(build_kind, BuildKind::Editable),
"Only editable builds are exempt from 'no build' checks"
);
}
}
}

View file

@ -11,7 +11,8 @@ use url::Url;
use distribution_filename::WheelFilename;
use distribution_types::{
BuiltDist, DirectGitUrl, Dist, FileLocation, IndexLocations, LocalEditable, Name, SourceDist,
BuildableSource, BuiltDist, DirectGitUrl, Dist, FileLocation, IndexLocations, LocalEditable,
Name, SourceDist,
};
use platform_tags::Tags;
use pypi_types::Metadata23;
@ -116,7 +117,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
let url = Url::from_file_path(path).expect("path is absolute");
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Url(&url).remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Url(&url).wheel_dir(wheel.name().as_ref()),
wheel.filename.stem(),
);
@ -154,7 +155,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
// Create a cache entry for the wheel.
let wheel_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Index(&wheel.index).remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Index(&wheel.index).wheel_dir(wheel.name().as_ref()),
wheel.filename.stem(),
);
@ -196,7 +197,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
// Create a cache entry for the wheel.
let wheel_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Url(&wheel.url).remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Url(&wheel.url).wheel_dir(wheel.name().as_ref()),
wheel.filename.stem(),
);
@ -247,7 +248,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Url(&wheel.url).remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Url(&wheel.url).wheel_dir(wheel.name().as_ref()),
wheel.filename.stem(),
);
@ -284,7 +285,11 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
let lock = self.locks.acquire(&dist).await;
let _guard = lock.lock().await;
let built_wheel = self.builder.download_and_build(source_dist).boxed().await?;
let built_wheel = self
.builder
.download_and_build(BuildableSource::Dist(source_dist))
.boxed()
.await?;
// If the wheel was unzipped previously, respect it. Source distributions are
// cached under a unique build ID, so unzipped directories are never stale.
@ -358,7 +363,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
let metadata = self
.builder
.download_and_build_metadata(&source_dist)
.download_and_build_metadata(BuildableSource::Dist(&source_dist))
.boxed()
.await?;
Ok((metadata, precise))

View file

@ -1,4 +1,4 @@
use distribution_types::{git_reference, DirectUrlSourceDist, GitSourceDist, Name, PathSourceDist};
use distribution_types::{git_reference, DirectUrlSourceDist, GitSourceDist, PathSourceDist};
use platform_tags::Tags;
use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheShard, WheelCache};
use uv_fs::symlinks;
@ -23,7 +23,7 @@ impl BuiltWheelIndex {
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = cache.shard(
CacheBucket::BuiltWheels,
WheelCache::Url(source_dist.url.raw()).remote_wheel_dir(source_dist.name().as_ref()),
WheelCache::Url(source_dist.url.raw()).root(),
);
// Read the manifest from the cache. There's no need to enforce freshness, since we
@ -44,7 +44,7 @@ impl BuiltWheelIndex {
) -> Result<Option<CachedWheel>, Error> {
let cache_shard = cache.shard(
CacheBucket::BuiltWheels,
WheelCache::Path(&source_dist.url).remote_wheel_dir(source_dist.name().as_ref()),
WheelCache::Path(&source_dist.url).root(),
);
// Determine the last-modified time of the source distribution.
@ -72,8 +72,7 @@ impl BuiltWheelIndex {
let cache_shard = cache.shard(
CacheBucket::BuiltWheels,
WheelCache::Git(&source_dist.url, &git_sha.to_short_string())
.remote_wheel_dir(source_dist.name().as_ref()),
WheelCache::Git(&source_dist.url, &git_sha.to_short_string()).root(),
);
Self::find(&cache_shard, tags)

View file

@ -94,7 +94,7 @@ impl<'a> RegistryWheelIndex<'a> {
// Index all the wheels that were downloaded directly from the registry.
let wheel_dir = cache.shard(
CacheBucket::Wheels,
WheelCache::Index(index_url).remote_wheel_dir(package.to_string()),
WheelCache::Index(index_url).wheel_dir(package.to_string()),
);
Self::add_directory(&wheel_dir, tags, &mut versions);
@ -103,7 +103,7 @@ impl<'a> RegistryWheelIndex<'a> {
// from the registry.
let cache_shard = cache.shard(
CacheBucket::BuiltWheels,
WheelCache::Index(index_url).built_wheel_dir(package.to_string()),
WheelCache::Index(index_url).wheel_dir(package.to_string()),
);
// For registry wheels, the cache structure is: `<index>/<package-name>/<version>/`.

View file

@ -2,14 +2,14 @@ use std::sync::Arc;
use url::Url;
use distribution_types::SourceDist;
use distribution_types::BuildableSource;
pub trait Reporter: Send + Sync {
/// Callback to invoke when a source distribution build is kicked off.
fn on_build_start(&self, dist: &SourceDist) -> usize;
fn on_build_start(&self, source: BuildableSource) -> usize;
/// Callback to invoke when a source distribution build is complete.
fn on_build_complete(&self, dist: &SourceDist, id: usize);
fn on_build_complete(&self, source: BuildableSource, id: usize);
/// Callback to invoke when a repository checkout begins.
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize;

View file

@ -16,11 +16,11 @@ use zip::ZipArchive;
use distribution_filename::WheelFilename;
use distribution_types::{
DirectArchiveUrl, DirectGitUrl, Dist, FileLocation, GitSourceDist, LocalEditable, Name,
PathSourceDist, RemoteSource, SourceDist,
BuildableSource, DirectArchiveUrl, DirectGitUrl, Dist, FileLocation, GitSourceUrl,
LocalEditable, PathSourceDist, PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
};
use install_wheel_rs::metadata::read_archive_metadata;
use pep508_rs::{Scheme, VerbatimUrl};
use pep508_rs::Scheme;
use platform_tags::Tags;
use pypi_types::Metadata23;
use uv_cache::{
@ -80,24 +80,55 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// Download and build a [`SourceDist`].
pub async fn download_and_build(
&self,
source_dist: &SourceDist,
source: BuildableSource<'_>,
) -> Result<BuiltWheelMetadata, Error> {
let built_wheel_metadata = match &source_dist {
SourceDist::DirectUrl(direct_url_source_dist) => {
let filename = direct_url_source_dist
.filename()
.expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } =
DirectArchiveUrl::from(direct_url_source_dist.url.raw());
let built_wheel_metadata = match &source {
BuildableSource::Dist(SourceDist::Registry(dist)) => {
let url = match &dist.file.url {
FileLocation::RelativeUrl(base, url) => {
pypi_types::base_url_join_relative(base, url)?
}
FileLocation::AbsoluteUrl(url) => {
Url::parse(url).map_err(|err| Error::Url(url.clone(), err))?
}
FileLocation::Path(path) => {
let url = Url::from_file_path(path).expect("path is absolute");
// For direct URLs, cache directly under the hash of the URL itself.
// If necessary, extract the archive.
let extracted = extract_archive(path, self.build_context.cache()).await?;
return self
.path(source, PathSourceUrl { url: &url, path }, extracted.path())
.boxed()
.await;
}
};
// For registry source distributions, shard by package, then version, for
// convenience in debugging.
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Url(&url).remote_wheel_dir(direct_url_source_dist.name().as_ref()),
WheelCache::Index(&dist.index)
.wheel_dir(dist.filename.name.as_ref())
.join(dist.filename.version.to_string()),
);
self.url(source, &dist.file.filename, &url, &cache_shard, None)
.boxed()
.await?
}
BuildableSource::Dist(SourceDist::DirectUrl(dist)) => {
let filename = dist.filename().expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } = DirectArchiveUrl::from(dist.url.raw());
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self
.build_context
.cache()
.shard(CacheBucket::BuiltWheels, WheelCache::Url(&url).root());
self.url(
source_dist,
source,
&filename,
&url,
&cache_shard,
@ -106,64 +137,48 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
.boxed()
.await?
}
SourceDist::Registry(registry_source_dist) => {
let url = match &registry_source_dist.file.url {
FileLocation::RelativeUrl(base, url) => {
pypi_types::base_url_join_relative(base, url)?
}
FileLocation::AbsoluteUrl(url) => {
Url::parse(url).map_err(|err| Error::Url(url.clone(), err))?
}
FileLocation::Path(path) => {
// Create a distribution to represent the local path.
let path_source_dist = PathSourceDist {
name: registry_source_dist.filename.name.clone(),
url: VerbatimUrl::unknown(
Url::from_file_path(path).expect("path is absolute"),
),
path: path.clone(),
editable: false,
};
BuildableSource::Dist(SourceDist::Git(dist)) => {
self.git(source, GitSourceUrl::from(dist)).boxed().await?
}
BuildableSource::Dist(SourceDist::Path(dist)) => {
// If necessary, extract the archive.
let extracted = extract_archive(&dist.path, self.build_context.cache()).await?;
// If necessary, extract the archive.
let extracted =
extract_archive(&path_source_dist.path, self.build_context.cache())
.await?;
self.path(source, PathSourceUrl::from(dist), extracted.path())
.boxed()
.await?
}
BuildableSource::Url(SourceUrl::Direct(resource)) => {
let filename = resource
.url
.filename()
.expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } = DirectArchiveUrl::from(resource.url);
return self
.path(source_dist, &path_source_dist, extracted.path())
.boxed()
.await;
}
};
// For registry source distributions, shard by package, then version.
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Index(&registry_source_dist.index)
.remote_wheel_dir(registry_source_dist.filename.name.as_ref())
.join(registry_source_dist.filename.version.to_string()),
);
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self
.build_context
.cache()
.shard(CacheBucket::BuiltWheels, WheelCache::Url(&url).root());
self.url(
source_dist,
&registry_source_dist.file.filename,
source,
&filename,
&url,
&cache_shard,
None,
subdirectory.as_deref(),
)
.boxed()
.await?
}
SourceDist::Git(git_source_dist) => {
self.git(source_dist, git_source_dist).boxed().await?
BuildableSource::Url(SourceUrl::Git(resource)) => {
self.git(source, *resource).boxed().await?
}
SourceDist::Path(path_source_dist) => {
BuildableSource::Url(SourceUrl::Path(resource)) => {
// If necessary, extract the archive.
let extracted =
extract_archive(&path_source_dist.path, self.build_context.cache()).await?;
let extracted = extract_archive(resource.path, self.build_context.cache()).await?;
self.path(source_dist, path_source_dist, extracted.path())
self.path(source, *resource, extracted.path())
.boxed()
.await?
}
@ -177,34 +192,11 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// metadata without building the source distribution.
pub async fn download_and_build_metadata(
&self,
source_dist: &SourceDist,
source: BuildableSource<'_>,
) -> Result<Metadata23, Error> {
let metadata = match &source_dist {
SourceDist::DirectUrl(direct_url_source_dist) => {
let filename = direct_url_source_dist
.filename()
.expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } =
DirectArchiveUrl::from(direct_url_source_dist.url.raw());
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Url(&url).remote_wheel_dir(direct_url_source_dist.name().as_ref()),
);
self.url_metadata(
source_dist,
&filename,
&url,
&cache_shard,
subdirectory.as_deref(),
)
.boxed()
.await?
}
SourceDist::Registry(registry_source_dist) => {
let url = match &registry_source_dist.file.url {
let metadata = match &source {
BuildableSource::Dist(SourceDist::Registry(dist)) => {
let url = match &dist.file.url {
FileLocation::RelativeUrl(base, url) => {
pypi_types::base_url_join_relative(base, url)?
}
@ -212,23 +204,17 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
Url::parse(url).map_err(|err| Error::Url(url.clone(), err))?
}
FileLocation::Path(path) => {
// Create a distribution to represent the local path.
let path_source_dist = PathSourceDist {
name: registry_source_dist.filename.name.clone(),
url: VerbatimUrl::unknown(
Url::from_file_path(path).expect("path is absolute"),
),
path: path.clone(),
editable: false,
};
let url = Url::from_file_path(path).expect("path is absolute");
// If necessary, extract the archive.
let extracted =
extract_archive(&path_source_dist.path, self.build_context.cache())
.await?;
let extracted = extract_archive(path, self.build_context.cache()).await?;
return self
.path_metadata(source_dist, &path_source_dist, extracted.path())
.path_metadata(
source,
PathSourceUrl { url: &url, path },
extracted.path(),
)
.boxed()
.await;
}
@ -237,32 +223,79 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
// For registry source distributions, shard by package, then version.
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Index(&registry_source_dist.index)
.remote_wheel_dir(registry_source_dist.filename.name.as_ref())
.join(registry_source_dist.filename.version.to_string()),
WheelCache::Index(&dist.index)
.wheel_dir(dist.filename.name.as_ref())
.join(dist.filename.version.to_string()),
);
self.url_metadata(source, &dist.file.filename, &url, &cache_shard, None)
.boxed()
.await?
}
BuildableSource::Dist(SourceDist::DirectUrl(dist)) => {
let filename = dist.filename().expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } = DirectArchiveUrl::from(dist.url.raw());
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self
.build_context
.cache()
.shard(CacheBucket::BuiltWheels, WheelCache::Url(&url).root());
self.url_metadata(
source_dist,
&registry_source_dist.file.filename,
source,
&filename,
&url,
&cache_shard,
None,
subdirectory.as_deref(),
)
.boxed()
.await?
}
SourceDist::Git(git_source_dist) => {
self.git_metadata(source_dist, git_source_dist)
BuildableSource::Dist(SourceDist::Git(dist)) => {
self.git_metadata(source, GitSourceUrl::from(dist))
.boxed()
.await?
}
SourceDist::Path(path_source_dist) => {
BuildableSource::Dist(SourceDist::Path(dist)) => {
// If necessary, extract the archive.
let extracted =
extract_archive(&path_source_dist.path, self.build_context.cache()).await?;
let extracted = extract_archive(&dist.path, self.build_context.cache()).await?;
self.path_metadata(source_dist, path_source_dist, extracted.path())
self.path_metadata(source, PathSourceUrl::from(dist), extracted.path())
.boxed()
.await?
}
BuildableSource::Url(SourceUrl::Direct(resource)) => {
let filename = resource
.url
.filename()
.expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } = DirectArchiveUrl::from(resource.url);
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self
.build_context
.cache()
.shard(CacheBucket::BuiltWheels, WheelCache::Url(&url).root());
self.url_metadata(
source,
&filename,
&url,
&cache_shard,
subdirectory.as_deref(),
)
.boxed()
.await?
}
BuildableSource::Url(SourceUrl::Git(resource)) => {
self.git_metadata(source, *resource).boxed().await?
}
BuildableSource::Url(SourceUrl::Path(resource)) => {
// If necessary, extract the archive.
let extracted = extract_archive(resource.path, self.build_context.cache()).await?;
self.path_metadata(source, *resource, extracted.path())
.boxed()
.await?
}
@ -275,7 +308,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
#[allow(clippy::too_many_arguments)]
async fn url<'data>(
&self,
source_dist: &'data SourceDist,
source: BuildableSource<'data>,
filename: &'data str,
url: &'data Url,
cache_shard: &CacheShard,
@ -286,7 +319,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
Connectivity::Online => CacheControl::from(
self.build_context
.cache()
.freshness(&cache_entry, Some(source_dist.name()))
.freshness(&cache_entry, source.name())
.map_err(Error::CacheRead)?,
),
Connectivity::Offline => CacheControl::AllowStale,
@ -299,15 +332,15 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let manifest = Manifest::new();
// Download the source distribution.
debug!("Downloading source distribution: {source_dist}");
debug!("Downloading source distribution: {source}");
let source_dist_entry = cache_shard.shard(manifest.id()).entry(filename);
self.persist_source_dist_url(response, source_dist, filename, &source_dist_entry)
self.persist_url(response, source, filename, &source_dist_entry)
.await?;
Ok(manifest)
}
.boxed()
.instrument(info_span!("download", source_dist = %source_dist))
.instrument(info_span!("download", source_dist = %source))
};
let req = self
.client
@ -345,22 +378,17 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
// Build the source distribution.
let source_dist_entry = cache_shard.entry(filename);
let (disk_filename, wheel_filename, metadata) = self
.build_source_dist(
source_dist,
source_dist_entry.path(),
subdirectory,
&cache_shard,
)
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
@ -384,7 +412,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
#[allow(clippy::too_many_arguments)]
async fn url_metadata<'data>(
&self,
source_dist: &'data SourceDist,
source: BuildableSource<'data>,
filename: &'data str,
url: &'data Url,
cache_shard: &CacheShard,
@ -395,7 +423,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
Connectivity::Online => CacheControl::from(
self.build_context
.cache()
.freshness(&cache_entry, Some(source_dist.name()))
.freshness(&cache_entry, source.name())
.map_err(Error::CacheRead)?,
),
Connectivity::Offline => CacheControl::AllowStale,
@ -408,15 +436,15 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let manifest = Manifest::new();
// Download the source distribution.
debug!("Downloading source distribution: {source_dist}");
debug!("Downloading source distribution: {source}");
let source_dist_entry = cache_shard.shard(manifest.id()).entry(filename);
self.persist_source_dist_url(response, source_dist, filename, &source_dist_entry)
self.persist_url(response, source, filename, &source_dist_entry)
.await?;
Ok(manifest)
}
.boxed()
.instrument(info_span!("download", source_dist = %source_dist))
.instrument(info_span!("download", source_dist = %source))
};
let req = self
.client
@ -449,7 +477,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
// If the cache contains compatible metadata, return it.
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for {source_dist}");
debug!("Using cached metadata for: {source}");
return Ok(metadata);
}
@ -458,7 +486,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, source_dist_entry.path(), subdirectory)
.build_metadata(source, source_dist_entry.path(), subdirectory)
.boxed()
.await?
{
@ -477,16 +505,11 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
// Build the source distribution.
let (_disk_filename, _wheel_filename, metadata) = self
.build_source_dist(
source_dist,
source_dist_entry.path(),
subdirectory,
&cache_shard,
)
.build_distribution(source, source_dist_entry.path(), subdirectory, &cache_shard)
.await?;
// Store the metadata.
@ -497,7 +520,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
@ -507,19 +530,18 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// Build a source distribution from a local path.
async fn path(
&self,
source_dist: &SourceDist,
path_source_dist: &PathSourceDist,
source: BuildableSource<'_>,
resource: PathSourceUrl<'_>,
source_root: &Path,
) -> Result<BuiltWheelMetadata, Error> {
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Path(&path_source_dist.url)
.remote_wheel_dir(path_source_dist.name().as_ref()),
WheelCache::Path(resource.url).root(),
);
// Determine the last-modified time of the source distribution.
let Some(modified) =
ArchiveTimestamp::from_path(&path_source_dist.path).map_err(Error::CacheRead)?
ArchiveTimestamp::from_path(resource.path).map_err(Error::CacheRead)?
else {
return Err(Error::DirWithoutEntrypoint);
};
@ -529,7 +551,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let manifest_freshness = self
.build_context
.cache()
.freshness(&manifest_entry, Some(source_dist.name()))
.freshness(&manifest_entry, source.name())
.map_err(Error::CacheRead)?;
let manifest =
refresh_timestamp_manifest(&manifest_entry, manifest_freshness, modified).await?;
@ -549,21 +571,21 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_source_dist(source_dist, source_root, None, &cache_shard)
.build_distribution(source, source_root, None, &cache_shard)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
// Store the metadata.
let cache_entry = cache_shard.entry(METADATA);
write_atomic(cache_entry.path(), rmp_serde::to_vec(&metadata)?)
let metadata_entry = cache_shard.entry(METADATA);
write_atomic(metadata_entry.path(), rmp_serde::to_vec(&metadata)?)
.await
.map_err(Error::CacheWrite)?;
@ -580,19 +602,18 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// building the wheel.
async fn path_metadata(
&self,
source_dist: &SourceDist,
path_source_dist: &PathSourceDist,
source: BuildableSource<'_>,
resource: PathSourceUrl<'_>,
source_root: &Path,
) -> Result<Metadata23, Error> {
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Path(&path_source_dist.url)
.remote_wheel_dir(path_source_dist.name().as_ref()),
WheelCache::Path(resource.url).root(),
);
// Determine the last-modified time of the source distribution.
let Some(modified) =
ArchiveTimestamp::from_path(&path_source_dist.path).map_err(Error::CacheRead)?
ArchiveTimestamp::from_path(resource.path).map_err(Error::CacheRead)?
else {
return Err(Error::DirWithoutEntrypoint);
};
@ -602,7 +623,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let manifest_freshness = self
.build_context
.cache()
.freshness(&manifest_entry, Some(source_dist.name()))
.freshness(&manifest_entry, source.name())
.map_err(Error::CacheRead)?;
let manifest =
refresh_timestamp_manifest(&manifest_entry, manifest_freshness, modified).await?;
@ -618,18 +639,18 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
if self
.build_context
.cache()
.freshness(&metadata_entry, Some(source_dist.name()))
.freshness(&metadata_entry, source.name())
.is_ok_and(Freshness::is_fresh)
{
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for {source_dist}");
debug!("Using cached metadata for: {source}");
return Ok(metadata);
}
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, source_root, None)
.build_metadata(source, source_root, None)
.boxed()
.await?
{
@ -649,21 +670,21 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_source_dist(source_dist, source_root, None, &cache_shard)
.build_distribution(source, source_root, None, &cache_shard)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
// Store the metadata.
let cache_entry = cache_shard.entry(METADATA);
write_atomic(cache_entry.path(), rmp_serde::to_vec(&metadata)?)
let metadata_entry = cache_shard.entry(METADATA);
write_atomic(metadata_entry.path(), rmp_serde::to_vec(&metadata)?)
.await
.map_err(Error::CacheWrite)?;
@ -673,11 +694,11 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// Build a source distribution from a Git repository.
async fn git(
&self,
source_dist: &SourceDist,
git_source_dist: &GitSourceDist,
source: BuildableSource<'_>,
resource: GitSourceUrl<'_>,
) -> Result<BuiltWheelMetadata, Error> {
let (fetch, subdirectory) = fetch_git_archive(
&git_source_dist.url,
resource.url,
self.build_context.cache(),
self.reporter.as_ref(),
)
@ -686,8 +707,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let git_sha = fetch.git().precise().expect("Exact commit after checkout");
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Git(&git_source_dist.url, &git_sha.to_short_string())
.remote_wheel_dir(git_source_dist.name().as_ref()),
WheelCache::Git(resource.url, &git_sha.to_short_string()).root(),
);
// If the cache contains a compatible wheel, return it.
@ -698,20 +718,15 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_source_dist(
source_dist,
fetch.path(),
subdirectory.as_deref(),
&cache_shard,
)
.build_distribution(source, fetch.path(), subdirectory.as_deref(), &cache_shard)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
@ -734,11 +749,11 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// building the wheel.
async fn git_metadata(
&self,
source_dist: &SourceDist,
git_source_dist: &GitSourceDist,
source: BuildableSource<'_>,
resource: GitSourceUrl<'_>,
) -> Result<Metadata23, Error> {
let (fetch, subdirectory) = fetch_git_archive(
&git_source_dist.url,
resource.url,
self.build_context.cache(),
self.reporter.as_ref(),
)
@ -747,8 +762,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let git_sha = fetch.git().precise().expect("Exact commit after checkout");
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Git(&git_source_dist.url, &git_sha.to_short_string())
.remote_wheel_dir(git_source_dist.name().as_ref()),
WheelCache::Git(resource.url, &git_sha.to_short_string()).root(),
);
// If the cache contains compatible metadata, return it.
@ -756,18 +770,18 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
if self
.build_context
.cache()
.freshness(&metadata_entry, Some(source_dist.name()))
.freshness(&metadata_entry, source.name())
.is_ok_and(Freshness::is_fresh)
{
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for {source_dist}");
debug!("Using cached metadata for: {source}");
return Ok(metadata);
}
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, fetch.path(), subdirectory.as_deref())
.build_metadata(source, fetch.path(), subdirectory.as_deref())
.boxed()
.await?
{
@ -787,20 +801,15 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_source_dist(
source_dist,
fetch.path(),
subdirectory.as_deref(),
&cache_shard,
)
.build_distribution(source, fetch.path(), subdirectory.as_deref(), &cache_shard)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
reporter.on_build_complete(source, task);
}
}
@ -814,22 +823,21 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
}
/// Download and unzip a source distribution into the cache from an HTTP response.
async fn persist_source_dist_url<'data>(
async fn persist_url<'data>(
&self,
response: Response,
source_dist: &SourceDist,
source: BuildableSource<'_>,
filename: &str,
cache_entry: &'data CacheEntry,
) -> Result<&'data Path, Error> {
let cache_path = cache_entry.path();
if cache_path.is_dir() {
debug!("Distribution is already cached: {source_dist}");
debug!("Distribution is already cached: {source}");
return Ok(cache_path);
}
// Download and unzip the source distribution into a temporary directory.
let span =
info_span!("download_source_dist", filename = filename, source_dist = %source_dist);
let span = info_span!("download_source_dist", filename = filename, source_dist = %source);
let temp_dir =
tempfile::tempdir_in(self.build_context.cache().bucket(CacheBucket::BuiltWheels))
.map_err(Error::CacheWrite)?;
@ -862,20 +870,22 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
///
/// Returns the un-normalized disk filename, the parsed, normalized filename and the metadata
#[instrument(skip_all, fields(dist))]
async fn build_source_dist(
async fn build_distribution(
&self,
dist: &SourceDist,
source_dist: &Path,
source: BuildableSource<'_>,
source_root: &Path,
subdirectory: Option<&Path>,
cache_shard: &CacheShard,
) -> Result<(String, WheelFilename, Metadata23), Error> {
debug!("Building: {dist}");
debug!("Building: {source}");
// Guard against build of source distributions when disabled
// Guard against build of source distributions when disabled.
let no_build = match self.build_context.no_build() {
NoBuild::All => true,
NoBuild::None => false,
NoBuild::Packages(packages) => packages.contains(dist.name()),
NoBuild::Packages(packages) => {
source.name().is_some_and(|name| packages.contains(name))
}
};
if no_build {
return Err(Error::NoBuild);
@ -888,61 +898,65 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let disk_filename = self
.build_context
.setup_build(
source_dist,
source_root,
subdirectory,
&dist.to_string(),
Some(dist),
&source.to_string(),
source.as_dist(),
BuildKind::Wheel,
)
.await
.map_err(|err| Error::Build(dist.to_string(), err))?
.map_err(|err| Error::Build(source.to_string(), err))?
.wheel(cache_shard)
.await
.map_err(|err| Error::Build(dist.to_string(), err))?;
.map_err(|err| Error::Build(source.to_string(), err))?;
// Read the metadata from the wheel.
let filename = WheelFilename::from_str(&disk_filename)?;
let metadata = read_wheel_metadata(&filename, cache_shard.join(&disk_filename))?;
// Validate the metadata.
if &metadata.name != dist.name() {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: dist.name().clone(),
});
if let Some(name) = source.name() {
if metadata.name != *name {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: name.clone(),
});
}
}
debug!("Finished building: {dist}");
debug!("Finished building: {source}");
Ok((disk_filename, filename, metadata))
}
/// Build the metadata for a source distribution.
#[instrument(skip_all, fields(dist))]
async fn build_source_dist_metadata(
async fn build_metadata(
&self,
dist: &SourceDist,
source_tree: &Path,
source: BuildableSource<'_>,
source_root: &Path,
subdirectory: Option<&Path>,
) -> Result<Option<Metadata23>, Error> {
debug!("Preparing metadata for: {dist}");
debug!("Preparing metadata for: {source}");
// Attempt to read static metadata from the source distribution.
match read_pkg_info(source_tree).await {
match read_pkg_info(source_root).await {
Ok(metadata) => {
debug!("Found static metadata for: {dist}");
debug!("Found static metadata for: {source}");
// Validate the metadata.
if &metadata.name != dist.name() {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: dist.name().clone(),
});
if let Some(name) = source.name() {
if metadata.name != *name {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: name.clone(),
});
}
}
return Ok(Some(metadata));
}
Err(err @ (Error::MissingPkgInfo | Error::DynamicPkgInfo(_))) => {
debug!("No static metadata available for: {dist} ({err:?})");
debug!("No static metadata available for: {source} ({err:?})");
}
Err(err) => return Err(err),
}
@ -951,37 +965,39 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let mut builder = self
.build_context
.setup_build(
source_tree,
source_root,
subdirectory,
&dist.to_string(),
Some(dist),
&source.to_string(),
source.as_dist(),
BuildKind::Wheel,
)
.await
.map_err(|err| Error::Build(dist.to_string(), err))?;
.map_err(|err| Error::Build(source.to_string(), err))?;
// Build the metadata.
let dist_info = builder
.metadata()
.await
.map_err(|err| Error::Build(dist.to_string(), err))?;
.map_err(|err| Error::Build(source.to_string(), err))?;
let Some(dist_info) = dist_info else {
return Ok(None);
};
// Read the metadata from disk.
debug!("Prepared metadata for: {dist}");
debug!("Prepared metadata for: {source}");
let content = fs::read(dist_info.join("METADATA"))
.await
.map_err(Error::CacheRead)?;
let metadata = Metadata23::parse_metadata(&content)?;
// Validate the metadata.
if &metadata.name != dist.name() {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: dist.name().clone(),
});
if let Some(name) = source.name() {
if metadata.name != *name {
return Err(Error::NameMismatch {
metadata: metadata.name,
given: name.clone(),
});
}
}
Ok(Some(metadata))

View file

@ -8,7 +8,9 @@ use tokio::task::JoinError;
use tracing::instrument;
use url::Url;
use distribution_types::{CachedDist, Dist, Identifier, LocalEditable, RemoteSource, SourceDist};
use distribution_types::{
BuildableSource, CachedDist, Dist, Identifier, LocalEditable, RemoteSource,
};
use platform_tags::Tags;
use uv_cache::Cache;
use uv_client::RegistryClient;
@ -233,10 +235,10 @@ pub trait Reporter: Send + Sync {
fn on_complete(&self);
/// Callback to invoke when a source distribution build is kicked off.
fn on_build_start(&self, dist: &SourceDist) -> usize;
fn on_build_start(&self, source: BuildableSource) -> usize;
/// Callback to invoke when a source distribution build is complete.
fn on_build_complete(&self, dist: &SourceDist, id: usize);
fn on_build_complete(&self, source: BuildableSource, id: usize);
/// Callback to invoke when a editable build is kicked off.
fn on_editable_build_start(&self, dist: &LocalEditable) -> usize;
@ -263,12 +265,12 @@ impl From<Arc<dyn Reporter>> for Facade {
}
impl uv_distribution::Reporter for Facade {
fn on_build_start(&self, dist: &SourceDist) -> usize {
self.reporter.on_build_start(dist)
fn on_build_start(&self, source: BuildableSource) -> usize {
self.reporter.on_build_start(source)
}
fn on_build_complete(&self, dist: &SourceDist, id: usize) {
self.reporter.on_build_complete(dist, id);
fn on_build_complete(&self, source: BuildableSource, id: usize) {
self.reporter.on_build_complete(source, id);
}
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize {

View file

@ -278,8 +278,7 @@ impl<'a> Planner<'a> {
let cache_entry = cache
.shard(
CacheBucket::Wheels,
WheelCache::Url(&wheel.url)
.remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Url(&wheel.url).wheel_dir(wheel.name().as_ref()),
)
.entry(wheel.filename.stem());
@ -321,8 +320,7 @@ impl<'a> Planner<'a> {
let cache_entry = cache
.shard(
CacheBucket::Wheels,
WheelCache::Url(&wheel.url)
.remote_wheel_dir(wheel.name().as_ref()),
WheelCache::Url(&wheel.url).wheel_dir(wheel.name().as_ref()),
)
.entry(wheel.filename.stem());

View file

@ -2,7 +2,7 @@ use std::sync::Arc;
use url::Url;
use distribution_types::{SourceDist, VersionOrUrl};
use distribution_types::{BuildableSource, VersionOrUrl};
use uv_normalize::PackageName;
pub type BuildId = usize;
@ -15,10 +15,10 @@ pub trait Reporter: Send + Sync {
fn on_complete(&self);
/// Callback to invoke when a source distribution build is kicked off.
fn on_build_start(&self, dist: &SourceDist) -> usize;
fn on_build_start(&self, source: BuildableSource) -> usize;
/// Callback to invoke when a source distribution build is complete.
fn on_build_complete(&self, dist: &SourceDist, id: usize);
fn on_build_complete(&self, source: BuildableSource, id: usize);
/// Callback to invoke when a repository checkout begins.
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize;
@ -33,12 +33,12 @@ pub(crate) struct Facade {
}
impl uv_distribution::Reporter for Facade {
fn on_build_start(&self, dist: &SourceDist) -> usize {
self.reporter.on_build_start(dist)
fn on_build_start(&self, source: BuildableSource) -> usize {
self.reporter.on_build_start(source)
}
fn on_build_complete(&self, dist: &SourceDist, id: usize) {
self.reporter.on_build_complete(dist, id);
fn on_build_complete(&self, source: BuildableSource, id: usize) {
self.reporter.on_build_complete(source, id);
}
fn on_checkout_start(&self, url: &Url, rev: &str) -> usize {

View file

@ -6,7 +6,8 @@ use owo_colors::OwoColorize;
use url::Url;
use distribution_types::{
CachedDist, Dist, DistributionMetadata, LocalEditable, Name, SourceDist, VersionOrUrl,
BuildableSource, CachedDist, Dist, DistributionMetadata, LocalEditable, Name, SourceDist,
VersionOrUrl,
};
use uv_normalize::PackageName;
@ -114,12 +115,12 @@ impl uv_installer::DownloadReporter for DownloadReporter {
self.progress.finish_and_clear();
}
fn on_build_start(&self, dist: &SourceDist) -> usize {
self.on_any_build_start(&dist.to_color_string())
fn on_build_start(&self, source: BuildableSource) -> usize {
self.on_any_build_start(&source.to_color_string())
}
fn on_build_complete(&self, dist: &SourceDist, index: usize) {
self.on_any_build_complete(&dist.to_color_string(), index);
fn on_build_complete(&self, source: BuildableSource, index: usize) {
self.on_any_build_complete(&source.to_color_string(), index);
}
fn on_editable_build_start(&self, dist: &LocalEditable) -> usize {
@ -243,7 +244,7 @@ impl uv_resolver::ResolverReporter for ResolverReporter {
self.progress.finish_and_clear();
}
fn on_build_start(&self, dist: &SourceDist) -> usize {
fn on_build_start(&self, source: BuildableSource) -> usize {
let progress = self.multi_progress.insert_before(
&self.progress,
ProgressBar::with_draw_target(None, self.printer.target()),
@ -253,7 +254,7 @@ impl uv_resolver::ResolverReporter for ResolverReporter {
progress.set_message(format!(
"{} {}",
"Building".bold().cyan(),
dist.to_color_string(),
source.to_color_string(),
));
let mut bars = self.bars.lock().unwrap();
@ -261,13 +262,13 @@ impl uv_resolver::ResolverReporter for ResolverReporter {
bars.len() - 1
}
fn on_build_complete(&self, dist: &SourceDist, index: usize) {
fn on_build_complete(&self, source: BuildableSource, index: usize) {
let bars = self.bars.lock().unwrap();
let progress = &bars[index];
progress.finish_with_message(format!(
" {} {}",
"Built".bold().green(),
dist.to_color_string(),
source.to_color_string(),
));
}
@ -316,6 +317,15 @@ impl ColorDisplay for SourceDist {
}
}
impl ColorDisplay for BuildableSource<'_> {
fn to_color_string(&self) -> String {
match self {
BuildableSource::Dist(dist) => dist.to_color_string(),
BuildableSource::Url(url) => url.to_string(),
}
}
}
impl ColorDisplay for LocalEditable {
fn to_color_string(&self) -> String {
format!("{}", self.to_string().dimmed())