Add zstandard support for wheels (#15645)

## Summary

This PR allows pyx to send down hashes for zstandard-compressed
tarballs. If the hash is present, then the file is assumed to be present
at `${wheel_url}.tar.zst`, similar in design to PEP 658
`${wheel_metadata}.metadata` files. The intent here is that the index
must include the wheel (to support all clients and to allow
random access), but can optionally include a zstandard-compressed
version alongside it.
This commit is contained in:
Charlie Marsh 2025-09-02 21:38:31 -04:00 committed by GitHub
parent 7606f1ad3c
commit 4e48d759c4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 279 additions and 29 deletions

2
Cargo.lock generated
View file

@ -5675,6 +5675,7 @@ dependencies = [
"reqwest", "reqwest",
"rustc-hash", "rustc-hash",
"sha2", "sha2",
"tar",
"thiserror 2.0.16", "thiserror 2.0.16",
"tokio", "tokio",
"tokio-util", "tokio-util",
@ -5685,6 +5686,7 @@ dependencies = [
"uv-static", "uv-static",
"xz2", "xz2",
"zip", "zip",
"zstd",
] ]
[[package]] [[package]]

View file

@ -201,6 +201,7 @@ windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Secur
wiremock = { version = "0.6.4" } wiremock = { version = "0.6.4" }
xz2 = { version = "0.1.7" } xz2 = { version = "0.1.7" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] } zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
zstd = { version = "0.13.3" }
# dev-dependencies # dev-dependencies
assert_cmd = { version = "2.0.16" } assert_cmd = { version = "2.0.16" }

View file

@ -305,6 +305,7 @@ impl<'a> FlatIndexClient<'a> {
upload_time_utc_ms: None, upload_time_utc_ms: None,
url: FileLocation::AbsoluteUrl(UrlString::from(url)), url: FileLocation::AbsoluteUrl(UrlString::from(url)),
yanked: None, yanked: None,
zstd: None,
}; };
let Some(filename) = DistFilename::try_from_normalized_filename(filename) else { let Some(filename) = DistFilename::try_from_normalized_filename(filename) else {

View file

@ -40,6 +40,7 @@ pub struct File {
pub upload_time_utc_ms: Option<i64>, pub upload_time_utc_ms: Option<i64>,
pub url: FileLocation, pub url: FileLocation,
pub yanked: Option<Box<Yanked>>, pub yanked: Option<Box<Yanked>>,
pub zstd: Option<Box<Zstd>>,
} }
impl File { impl File {
@ -63,6 +64,7 @@ impl File {
upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond),
url: FileLocation::new(file.url, base), url: FileLocation::new(file.url, base),
yanked: file.yanked, yanked: file.yanked,
zstd: None,
}) })
} }
@ -108,6 +110,13 @@ impl File {
upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond),
url: FileLocation::new(file.url, base), url: FileLocation::new(file.url, base),
yanked: file.yanked, yanked: file.yanked,
zstd: file
.zstd
.map(|zstd| Zstd {
hashes: HashDigests::from(zstd.hashes),
size: zstd.size,
})
.map(Box::new),
}) })
} }
} }
@ -289,6 +298,12 @@ pub enum ToUrlError {
}, },
} }
/// Metadata for an optional zstandard-compressed copy of a wheel, served
/// alongside the wheel itself at `${wheel_url}.tar.zst`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
pub struct Zstd {
    /// The hash digests of the `.tar.zst` file.
    pub hashes: HashDigests,
    /// The size of the `.tar.zst` file in bytes, if known.
    pub size: Option<u64>,
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -20,8 +20,8 @@ use uv_client::{
}; };
use uv_distribution_filename::WheelFilename; use uv_distribution_filename::WheelFilename;
use uv_distribution_types::{ use uv_distribution_types::{
BuildInfo, BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, BuildInfo, BuildableSource, BuiltDist, Dist, File, HashPolicy, Hashed, IndexUrl, InstalledDist,
SourceDist, Name, SourceDist, ToUrlError,
}; };
use uv_extract::hash::Hasher; use uv_extract::hash::Hasher;
use uv_fs::write_atomic; use uv_fs::write_atomic;
@ -179,7 +179,11 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
match dist { match dist {
BuiltDist::Registry(wheels) => { BuiltDist::Registry(wheels) => {
let wheel = wheels.best_wheel(); let wheel = wheels.best_wheel();
let url = wheel.file.url.to_url()?; let WheelTarget {
url,
extension,
size,
} = WheelTarget::try_from(&*wheel.file)?;
// Create a cache entry for the wheel. // Create a cache entry for the wheel.
let wheel_entry = self.build_context.cache().entry( let wheel_entry = self.build_context.cache().entry(
@ -194,7 +198,14 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
.to_file_path() .to_file_path()
.map_err(|()| Error::NonFileUrl(url.clone()))?; .map_err(|()| Error::NonFileUrl(url.clone()))?;
return self return self
.load_wheel(&path, &wheel.filename, wheel_entry, dist, hashes) .load_wheel(
&path,
&wheel.filename,
WheelExtension::Whl,
wheel_entry,
dist,
hashes,
)
.await; .await;
} }
@ -204,7 +215,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
url.clone(), url.clone(),
dist.index(), dist.index(),
&wheel.filename, &wheel.filename,
wheel.file.size, extension,
size,
&wheel_entry, &wheel_entry,
dist, dist,
hashes, hashes,
@ -241,7 +253,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
url, url,
dist.index(), dist.index(),
&wheel.filename, &wheel.filename,
wheel.file.size, extension,
size,
&wheel_entry, &wheel_entry,
dist, dist,
hashes, hashes,
@ -279,6 +292,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
wheel.url.raw().clone(), wheel.url.raw().clone(),
None, None,
&wheel.filename, &wheel.filename,
WheelExtension::Whl,
None, None,
&wheel_entry, &wheel_entry,
dist, dist,
@ -310,6 +324,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
wheel.url.raw().clone(), wheel.url.raw().clone(),
None, None,
&wheel.filename, &wheel.filename,
WheelExtension::Whl,
None, None,
&wheel_entry, &wheel_entry,
dist, dist,
@ -343,6 +358,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
self.load_wheel( self.load_wheel(
&wheel.install_path, &wheel.install_path,
&wheel.filename, &wheel.filename,
WheelExtension::Whl,
cache_entry, cache_entry,
dist, dist,
hashes, hashes,
@ -547,6 +563,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
url: DisplaySafeUrl, url: DisplaySafeUrl,
index: Option<&IndexUrl>, index: Option<&IndexUrl>,
filename: &WheelFilename, filename: &WheelFilename,
extension: WheelExtension,
size: Option<u64>, size: Option<u64>,
wheel_entry: &CacheEntry, wheel_entry: &CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
@ -588,15 +605,31 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
match progress { match progress {
Some((reporter, progress)) => { Some((reporter, progress)) => {
let mut reader = ProgressReader::new(&mut hasher, progress, &**reporter); let mut reader = ProgressReader::new(&mut hasher, progress, &**reporter);
uv_extract::stream::unzip(&mut reader, temp_dir.path()) match extension {
.await WheelExtension::Whl => {
.map_err(|err| Error::Extract(filename.to_string(), err))?; uv_extract::stream::unzip(&mut reader, temp_dir.path())
} .await
None => { .map_err(|err| Error::Extract(filename.to_string(), err))?;
uv_extract::stream::unzip(&mut hasher, temp_dir.path()) }
.await WheelExtension::WhlZst => {
.map_err(|err| Error::Extract(filename.to_string(), err))?; uv_extract::stream::untar_zst(&mut reader, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
}
} }
None => match extension {
WheelExtension::Whl => {
uv_extract::stream::unzip(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
WheelExtension::WhlZst => {
uv_extract::stream::untar_zst(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
},
} }
// If necessary, exhaust the reader to compute the hash. // If necessary, exhaust the reader to compute the hash.
@ -701,6 +734,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
url: DisplaySafeUrl, url: DisplaySafeUrl,
index: Option<&IndexUrl>, index: Option<&IndexUrl>,
filename: &WheelFilename, filename: &WheelFilename,
extension: WheelExtension,
size: Option<u64>, size: Option<u64>,
wheel_entry: &CacheEntry, wheel_entry: &CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
@ -772,7 +806,14 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let target = temp_dir.path().to_owned(); let target = temp_dir.path().to_owned();
move || -> Result<(), uv_extract::Error> { move || -> Result<(), uv_extract::Error> {
// Unzip the wheel into a temporary directory. // Unzip the wheel into a temporary directory.
uv_extract::unzip(file, &target)?; match extension {
WheelExtension::Whl => {
uv_extract::unzip(file, &target)?;
}
WheelExtension::WhlZst => {
uv_extract::stream::untar_zst_file(file, &target)?;
}
}
Ok(()) Ok(())
} }
}) })
@ -785,9 +826,19 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let algorithms = hashes.algorithms(); let algorithms = hashes.algorithms();
let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>(); let mut hashers = algorithms.into_iter().map(Hasher::from).collect::<Vec<_>>();
let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
uv_extract::stream::unzip(&mut hasher, temp_dir.path())
.await match extension {
.map_err(|err| Error::Extract(filename.to_string(), err))?; WheelExtension::Whl => {
uv_extract::stream::unzip(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
WheelExtension::WhlZst => {
uv_extract::stream::untar_zst(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
}
// If necessary, exhaust the reader to compute the hash. // If necessary, exhaust the reader to compute the hash.
hasher.finish().await.map_err(Error::HashExhaustion)?; hasher.finish().await.map_err(Error::HashExhaustion)?;
@ -887,6 +938,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
&self, &self,
path: &Path, path: &Path,
filename: &WheelFilename, filename: &WheelFilename,
extension: WheelExtension,
wheel_entry: CacheEntry, wheel_entry: CacheEntry,
dist: &BuiltDist, dist: &BuiltDist,
hashes: HashPolicy<'_>, hashes: HashPolicy<'_>,
@ -965,9 +1017,18 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers); let mut hasher = uv_extract::hash::HashReader::new(file, &mut hashers);
// Unzip the wheel to a temporary directory. // Unzip the wheel to a temporary directory.
uv_extract::stream::unzip(&mut hasher, temp_dir.path()) match extension {
.await WheelExtension::Whl => {
.map_err(|err| Error::Extract(filename.to_string(), err))?; uv_extract::stream::unzip(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
WheelExtension::WhlZst => {
uv_extract::stream::untar_zst(&mut hasher, temp_dir.path())
.await
.map_err(|err| Error::Extract(filename.to_string(), err))?;
}
}
// Exhaust the reader to compute the hash. // Exhaust the reader to compute the hash.
hasher.finish().await.map_err(Error::HashExhaustion)?; hasher.finish().await.map_err(Error::HashExhaustion)?;
@ -1227,3 +1288,90 @@ impl LocalArchivePointer {
None None
} }
} }
/// The resolved download target for a wheel: either the `.whl` file itself or,
/// when the index advertises one, its zstandard-compressed `.tar.zst` variant.
#[derive(Debug, Clone)]
struct WheelTarget {
    /// The URL from which the wheel can be downloaded.
    url: DisplaySafeUrl,
    /// The expected extension of the wheel file.
    extension: WheelExtension,
    /// The expected size of the wheel file, if known.
    size: Option<u64>,
}
impl TryFrom<&File> for WheelTarget {
    type Error = ToUrlError;

    /// Determine the [`WheelTarget`] from a [`File`].
    ///
    /// If the file advertises a zstandard-compressed variant, prefer it: the
    /// target URL gains a `.tar.zst` suffix and the size comes from the `zstd`
    /// entry. Otherwise, fall back to the plain wheel.
    fn try_from(file: &File) -> Result<Self, Self::Error> {
        let url = file.url.to_url()?;
        let (url, extension, size) = match file.zstd.as_deref() {
            Some(zstd) => (
                add_tar_zst_extension(url),
                WheelExtension::WhlZst,
                zstd.size,
            ),
            None => (url, WheelExtension::Whl, file.size),
        };
        Ok(Self {
            url,
            extension,
            size,
        })
    }
}
/// The on-disk format in which a wheel is delivered.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum WheelExtension {
    /// A `.whl` file (unpacked via the zip machinery).
    Whl,
    /// A `.whl.tar.zst` file (a zstandard-compressed tarball of the wheel).
    WhlZst,
}
/// Append `.tar.zst` to the URL's path, unless the suffix is already present.
///
/// Only the path component is rewritten; the rest of the URL is untouched.
#[must_use]
fn add_tar_zst_extension(mut url: DisplaySafeUrl) -> DisplaySafeUrl {
    if !url.path().ends_with(".tar.zst") {
        let extended = format!("{}.tar.zst", url.path());
        url.set_path(&extended);
    }
    url
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add_tar_zst_extension() {
        // Each case pairs an input URL with the expected rewritten URL.
        let cases = [
            // Plain wheel URL: the suffix is appended.
            (
                "https://files.pythonhosted.org/flask-3.1.0-py3-none-any.whl",
                "https://files.pythonhosted.org/flask-3.1.0-py3-none-any.whl.tar.zst",
            ),
            // Suffix already present: the URL is left untouched.
            (
                "https://files.pythonhosted.org/flask-3.1.0-py3-none-any.whl.tar.zst",
                "https://files.pythonhosted.org/flask-3.1.0-py3-none-any.whl.tar.zst",
            ),
            // Percent-encoded characters in the path must be preserved.
            (
                "https://files.pythonhosted.org/flask-3.1.0%2Bcu124-py3-none-any.whl",
                "https://files.pythonhosted.org/flask-3.1.0%2Bcu124-py3-none-any.whl.tar.zst",
            ),
        ];
        for (input, expected) in cases {
            let url = DisplaySafeUrl::parse(input).unwrap();
            assert_eq!(add_tar_zst_extension(url).as_str(), expected);
        }
    }
}

View file

@ -32,12 +32,14 @@ rayon = { workspace = true }
reqwest = { workspace = true } reqwest = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
sha2 = { workspace = true } sha2 = { workspace = true }
tar = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] } tokio-util = { workspace = true, features = ["compat"] }
tracing = { workspace = true } tracing = { workspace = true }
xz2 = { workspace = true } xz2 = { workspace = true }
zip = { workspace = true } zip = { workspace = true }
zstd = { workspace = true }
[features] [features]
default = [] default = []

View file

@ -686,6 +686,16 @@ pub async fn untar_zst<R: tokio::io::AsyncRead + Unpin>(
.map_err(Error::io_or_compression) .map_err(Error::io_or_compression)
} }
/// Unpack a `.tar.zst` archive from a file on disk into the target directory.
pub fn untar_zst_file<R: std::io::Read>(reader: R, target: impl AsRef<Path>) -> Result<(), Error> {
let reader = std::io::BufReader::with_capacity(DEFAULT_BUF_SIZE, reader);
let decompressed = zstd::Decoder::new(reader).map_err(Error::Io)?;
let mut archive = tar::Archive::new(decompressed);
archive.set_preserve_mtime(false);
archive.unpack(target).map_err(Error::io_or_compression)?;
Ok(())
}
/// Unpack a `.tar.xz` archive into the target directory, without requiring `Seek`. /// Unpack a `.tar.xz` archive into the target directory, without requiring `Seek`.
/// ///
/// This is useful for unpacking files as they're being downloaded. /// This is useful for unpacking files as they're being downloaded.

View file

@ -150,6 +150,7 @@ pub struct PyxFile {
pub upload_time: Option<Timestamp>, pub upload_time: Option<Timestamp>,
pub url: SmallString, pub url: SmallString,
pub yanked: Option<Box<Yanked>>, pub yanked: Option<Box<Yanked>>,
pub zstd: Option<Zstd>,
} }
impl<'de> Deserialize<'de> for PyxFile { impl<'de> Deserialize<'de> for PyxFile {
@ -178,6 +179,7 @@ impl<'de> Deserialize<'de> for PyxFile {
let mut upload_time = None; let mut upload_time = None;
let mut url = None; let mut url = None;
let mut yanked = None; let mut yanked = None;
let mut zstd = None;
while let Some(key) = access.next_key::<String>()? { while let Some(key) = access.next_key::<String>()? {
match key.as_str() { match key.as_str() {
@ -201,6 +203,9 @@ impl<'de> Deserialize<'de> for PyxFile {
"upload-time" => upload_time = Some(access.next_value()?), "upload-time" => upload_time = Some(access.next_value()?),
"url" => url = Some(access.next_value()?), "url" => url = Some(access.next_value()?),
"yanked" => yanked = Some(access.next_value()?), "yanked" => yanked = Some(access.next_value()?),
"zstd" => {
zstd = Some(access.next_value()?);
}
_ => { _ => {
let _: serde::de::IgnoredAny = access.next_value()?; let _: serde::de::IgnoredAny = access.next_value()?;
} }
@ -216,6 +221,7 @@ impl<'de> Deserialize<'de> for PyxFile {
upload_time, upload_time,
url: url.ok_or_else(|| serde::de::Error::missing_field("url"))?, url: url.ok_or_else(|| serde::de::Error::missing_field("url"))?,
yanked, yanked,
zstd,
}) })
} }
} }
@ -320,6 +326,13 @@ impl Default for Yanked {
} }
} }
/// Metadata for a zstandard-compressed copy of a file, as served by a pyx index.
#[derive(Debug, Clone, Eq, PartialEq, Default, Deserialize, Serialize)]
pub struct Zstd {
    /// The hash digests of the `.tar.zst` file.
    pub hashes: Hashes,
    /// The size of the `.tar.zst` file in bytes, if known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub size: Option<u64>,
}
/// A dictionary mapping a hash name to a hex encoded digest of the file. /// A dictionary mapping a hash name to a hex encoded digest of the file.
/// ///
/// PEP 691 says multiple hashes can be included and the interpretation is left to the client. /// PEP 691 says multiple hashes can be included and the interpretation is left to the client.

View file

@ -1369,6 +1369,7 @@ impl PylockTomlWheel {
upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
url: FileLocation::AbsoluteUrl(file_url), url: FileLocation::AbsoluteUrl(file_url),
yanked: None, yanked: None,
zstd: None,
}); });
Ok(RegistryBuiltWheel { Ok(RegistryBuiltWheel {
@ -1525,6 +1526,7 @@ impl PylockTomlSdist {
upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
url: FileLocation::AbsoluteUrl(file_url), url: FileLocation::AbsoluteUrl(file_url),
yanked: None, yanked: None,
zstd: None,
}); });
Ok(RegistrySourceDist { Ok(RegistrySourceDist {

View file

@ -2754,6 +2754,7 @@ impl Package {
upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond), upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
url: FileLocation::AbsoluteUrl(file_url.clone()), url: FileLocation::AbsoluteUrl(file_url.clone()),
yanked: None, yanked: None,
zstd: None,
}); });
let index = IndexUrl::from(VerbatimUrl::from_url( let index = IndexUrl::from(VerbatimUrl::from_url(
@ -2828,6 +2829,7 @@ impl Package {
upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond), upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
url: file_url, url: file_url,
yanked: None, yanked: None,
zstd: None,
}); });
let index = IndexUrl::from( let index = IndexUrl::from(
@ -3076,6 +3078,9 @@ impl Package {
} }
for wheel in &self.wheels { for wheel in &self.wheels {
hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone())); hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
if let Some(zstd) = wheel.zstd.as_ref() {
hashes.extend(zstd.hash.as_ref().map(|h| h.0.clone()));
}
} }
HashDigests::from(hashes) HashDigests::from(hashes)
} }
@ -3648,6 +3653,14 @@ impl Source {
} }
table.insert("source", value(source_table)); table.insert("source", value(source_table));
} }
/// Returns `true` if the package is local, i.e., its source is a filesystem
/// path, directory, editable, or virtual project.
pub(crate) fn is_local(&self) -> bool {
    matches!(
        self,
        Self::Path(_) | Self::Directory(_) | Self::Editable(_) | Self::Virtual(_)
    )
}
} }
impl Display for Source { impl Display for Source {
@ -3696,14 +3709,6 @@ impl Source {
} }
} }
} }
/// Check if a package is local by examining its source.
pub(crate) fn is_local(&self) -> bool {
matches!(
self,
Self::Path(_) | Self::Directory(_) | Self::Editable(_) | Self::Virtual(_)
)
}
} }
#[derive(Clone, Debug, serde::Deserialize)] #[derive(Clone, Debug, serde::Deserialize)]
@ -4315,6 +4320,12 @@ fn locked_git_url(git_dist: &GitSourceDist) -> DisplaySafeUrl {
url url
} }
/// The zstandard-compressed variant of a wheel, as recorded in the lockfile.
#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
struct ZstdWheel {
    /// The best available hash of the `.tar.zst` file, if any (the maximum
    /// digest when the registry reports several).
    hash: Option<Hash>,
    /// The size of the `.tar.zst` file in bytes, if known.
    size: Option<u64>,
}
/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593> /// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
#[serde(try_from = "WheelWire")] #[serde(try_from = "WheelWire")]
@ -4345,6 +4356,8 @@ struct Wheel {
/// deserialization time. Not being able to extract a wheel filename from a /// deserialization time. Not being able to extract a wheel filename from a
/// wheel URL is thus a deserialization error. /// wheel URL is thus a deserialization error.
filename: WheelFilename, filename: WheelFilename,
/// The zstandard-compressed wheel metadata, if any.
zstd: Option<ZstdWheel>,
} }
impl Wheel { impl Wheel {
@ -4453,12 +4466,17 @@ impl Wheel {
.map(Timestamp::from_millisecond) .map(Timestamp::from_millisecond)
.transpose() .transpose()
.map_err(LockErrorKind::InvalidTimestamp)?; .map_err(LockErrorKind::InvalidTimestamp)?;
let zstd = wheel.file.zstd.as_ref().map(|zstd| ZstdWheel {
hash: zstd.hashes.iter().max().cloned().map(Hash::from),
size: zstd.size,
});
Ok(Self { Ok(Self {
url, url,
hash, hash,
size, size,
upload_time, upload_time,
filename, filename,
zstd,
}) })
} }
@ -4471,6 +4489,7 @@ impl Wheel {
size: None, size: None,
upload_time: None, upload_time: None,
filename: direct_dist.filename.clone(), filename: direct_dist.filename.clone(),
zstd: None,
} }
} }
@ -4483,6 +4502,7 @@ impl Wheel {
size: None, size: None,
upload_time: None, upload_time: None,
filename: path_dist.filename.clone(), filename: path_dist.filename.clone(),
zstd: None,
} }
} }
@ -4516,6 +4536,14 @@ impl Wheel {
upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
url: file_location, url: file_location,
yanked: None, yanked: None,
zstd: self
.zstd
.as_ref()
.map(|zstd| uv_distribution_types::Zstd {
hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
size: zstd.size,
})
.map(Box::new),
}); });
let index = IndexUrl::from(VerbatimUrl::from_url( let index = IndexUrl::from(VerbatimUrl::from_url(
url.to_url().map_err(LockErrorKind::InvalidUrl)?, url.to_url().map_err(LockErrorKind::InvalidUrl)?,
@ -4558,6 +4586,14 @@ impl Wheel {
upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
url: file_location, url: file_location,
yanked: None, yanked: None,
zstd: self
.zstd
.as_ref()
.map(|zstd| uv_distribution_types::Zstd {
hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
size: zstd.size,
})
.map(Box::new),
}); });
let index = IndexUrl::from( let index = IndexUrl::from(
VerbatimUrl::from_absolute_path(root.join(index_path)) VerbatimUrl::from_absolute_path(root.join(index_path))
@ -4593,6 +4629,9 @@ struct WheelWire {
/// This is only present for wheels that come from registries. /// This is only present for wheels that come from registries.
#[serde(alias = "upload_time")] #[serde(alias = "upload_time")]
upload_time: Option<Timestamp>, upload_time: Option<Timestamp>,
/// The zstandard-compressed wheel metadata, if any.
#[serde(alias = "zstd")]
zstd: Option<ZstdWheel>,
} }
#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
@ -4648,6 +4687,19 @@ impl Wheel {
if let Some(upload_time) = self.upload_time { if let Some(upload_time) = self.upload_time {
table.insert("upload-time", Value::from(upload_time.to_string())); table.insert("upload-time", Value::from(upload_time.to_string()));
} }
if let Some(zstd) = &self.zstd {
let mut inner = InlineTable::new();
if let Some(ref hash) = zstd.hash {
inner.insert("hash", Value::from(hash.to_string()));
}
if let Some(size) = zstd.size {
inner.insert(
"size",
toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
);
}
table.insert("zstd", Value::from(inner));
}
Ok(table) Ok(table)
} }
} }
@ -4682,6 +4734,7 @@ impl TryFrom<WheelWire> for Wheel {
hash: wire.hash, hash: wire.hash,
size: wire.size, size: wire.size,
upload_time: wire.upload_time, upload_time: wire.upload_time,
zstd: wire.zstd,
filename, filename,
}) })
} }

View file

@ -83,6 +83,7 @@ Ok(
}, },
}, },
}, },
zstd: None,
}, },
], ],
fork_markers: [], fork_markers: [],

View file

@ -90,6 +90,7 @@ Ok(
}, },
}, },
}, },
zstd: None,
}, },
], ],
fork_markers: [], fork_markers: [],

View file

@ -86,6 +86,7 @@ Ok(
}, },
}, },
}, },
zstd: None,
}, },
], ],
fork_markers: [], fork_markers: [],