mirror of https://github.com/astral-sh/uv.git (synced 2025-11-03 05:03:46 +00:00)
Support unnamed requirements in --require-hashes (#2993)

## Summary

This PR enables `--require-hashes` with unnamed requirements. The key change is that `PackageId` becomes `VersionId` (since it refers to a package at a specific version), and the new `PackageId` consists of _either_ a package name _or_ a URL. The hashes are keyed by `PackageId`, so we can generate the `RequiredHashes` before we have names for all packages, and enforce them throughout.

Closes #2979.

This commit is contained in:
parent d56d142520
commit 96c3c2e774

25 changed files with 256 additions and 185 deletions
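Before the file-by-file diff, a hedged illustration of what this change enables at the CLI level. With `--require-hashes`, a requirements file may now include an *unnamed* requirement (one identified only by its URL) alongside named, pinned ones; every package name, URL, and digest below is hypothetical:

```text
# requirements.txt (hypothetical)
anyio==4.3.0 \
    --hash=sha256:0000000000000000000000000000000000000000000000000000000000000000
# An unnamed requirement: no package name, only a URL. Previously rejected under
# --require-hashes; with this change its hashes are keyed by the URL itself.
https://example.org/wheels/iniconfig-2.0.0-py3-none-any.whl \
    --hash=sha256:1111111111111111111111111111111111111111111111111111111111111111
```

Running `uv pip install --require-hashes -r requirements.txt` should then validate both entries instead of failing on the unnamed one.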
Cargo.lock (generated)

```diff
@@ -4842,6 +4842,7 @@ dependencies = [
  "serde",
  "serde_json",
  "thiserror",
+ "url",
  "uv-cache",
  "uv-configuration",
  "uv-interpreter",
```
```diff
@@ -1,32 +1,64 @@
 use std::fmt::{Display, Formatter};
 
+use cache_key::CanonicalUrl;
 use url::Url;
 
 use pep440_rs::Version;
 use uv_normalize::PackageName;
 
-/// A unique identifier for a package at a specific version (e.g., `black==23.10.0`).
+/// A unique identifier for a package. A package can either be identified by a name (e.g., `black`)
+/// or a URL (e.g., `git+https://github.com/psf/black`).
 #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub enum PackageId {
-    /// The identifier consists of a package name and version.
-    NameVersion(PackageName, Version),
+    /// The identifier consists of a package name.
+    Name(PackageName),
     /// The identifier consists of a URL.
     Url(String),
 }
 
 impl PackageId {
     /// Create a new [`PackageId`] from a package name and version.
-    pub fn from_registry(name: PackageName, version: Version) -> Self {
-        Self::NameVersion(name, version)
+    pub fn from_registry(name: PackageName) -> Self {
+        Self::Name(name)
     }
 
     /// Create a new [`PackageId`] from a URL.
     pub fn from_url(url: &Url) -> Self {
-        Self::Url(cache_key::digest(&cache_key::CanonicalUrl::new(url)))
+        Self::Url(cache_key::digest(&CanonicalUrl::new(url)))
     }
 }
 
 impl Display for PackageId {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Name(name) => write!(f, "{name}"),
+            Self::Url(url) => write!(f, "{url}"),
+        }
+    }
+}
+
+/// A unique identifier for a package at a specific version (e.g., `black==23.10.0`).
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum VersionId {
+    /// The identifier consists of a package name and version.
+    NameVersion(PackageName, Version),
+    /// The identifier consists of a URL.
+    Url(String),
+}
+
+impl VersionId {
+    /// Create a new [`VersionId`] from a package name and version.
+    pub fn from_registry(name: PackageName, version: Version) -> Self {
+        Self::NameVersion(name, version)
+    }
+
+    /// Create a new [`VersionId`] from a URL.
+    pub fn from_url(url: &Url) -> Self {
+        Self::Url(cache_key::digest(&CanonicalUrl::new(url)))
+    }
+}
+
+impl Display for VersionId {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::NameVersion(name, version) => write!(f, "{name}-{version}"),
```
```diff
@@ -73,7 +105,7 @@ impl ResourceId {
     }
 }
 
-impl From<&Self> for PackageId {
+impl From<&Self> for VersionId {
     /// Required for `WaitMap::wait`.
     fn from(value: &Self) -> Self {
        value.clone()
```
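The split above separates two notions of identity: `VersionId` answers "which package at which version?" (the right key for metadata caches), while `PackageId` answers "which package?" by name or URL (the right key for required hashes, which can be registered before any name is resolved). A minimal, self-contained sketch of that keying, with `String` stand-ins for uv's `PackageName`/`Version` and the raw URL standing in for the canonical-URL digest:

```rust
use std::collections::HashMap;

// Simplified stand-ins for the enums defined in the diff above.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum PackageId {
    Name(String),
    Url(String),
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum VersionId {
    NameVersion(String, String),
    Url(String),
}

fn main() {
    // Required hashes are keyed by `PackageId`, so an unnamed (URL-only)
    // requirement can register its hashes before any name is known.
    let mut required_hashes: HashMap<PackageId, Vec<String>> = HashMap::new();
    required_hashes.insert(PackageId::Name("black".into()), vec!["sha256:...".into()]);
    required_hashes.insert(
        PackageId::Url("https://example.org/black.whl".into()),
        vec!["sha256:...".into()],
    );

    // Metadata, by contrast, is version-specific, so caches are keyed by `VersionId`.
    let mut metadata: HashMap<VersionId, String> = HashMap::new();
    metadata.insert(
        VersionId::NameVersion("black".into(), "23.10.0".into()),
        "metadata for black 23.10.0".into(),
    );

    assert!(required_hashes.contains_key(&PackageId::Name("black".into())));
    assert_eq!(metadata.len(), 1);
}
```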
```diff
@@ -10,7 +10,8 @@ use crate::{
     BuiltDist, CachedDirectUrlDist, CachedDist, CachedRegistryDist, DirectUrlBuiltDist,
     DirectUrlSourceDist, Dist, DistributionId, GitSourceDist, InstalledDirectUrlDist,
     InstalledDist, InstalledRegistryDist, InstalledVersion, LocalDist, PackageId, PathBuiltDist,
-    PathSourceDist, RegistryBuiltDist, RegistrySourceDist, ResourceId, SourceDist, VersionOrUrl,
+    PathSourceDist, RegistryBuiltDist, RegistrySourceDist, ResourceId, SourceDist, VersionId,
+    VersionOrUrl,
 };
 
 pub trait Name {
@@ -25,16 +26,29 @@ pub trait DistributionMetadata: Name {
     /// for URL-based distributions.
     fn version_or_url(&self) -> VersionOrUrl;
 
-    /// Returns a unique identifier for the package (e.g., `black==23.10.0`).
+    /// Returns a unique identifier for the package at the given version (e.g., `black==23.10.0`).
     ///
     /// Note that this is not equivalent to a unique identifier for the _distribution_, as multiple
     /// registry-based distributions (e.g., different wheels for the same package and version)
-    /// will return the same package ID, but different distribution IDs.
-    fn package_id(&self) -> PackageId {
+    /// will return the same version ID, but different distribution IDs.
+    fn version_id(&self) -> VersionId {
         match self.version_or_url() {
             VersionOrUrl::Version(version) => {
-                PackageId::from_registry(self.name().clone(), version.clone())
+                VersionId::from_registry(self.name().clone(), version.clone())
             }
+            VersionOrUrl::Url(url) => VersionId::from_url(url),
+        }
+    }
+
+    /// Returns a unique identifier for a package. A package can either be identified by a name
+    /// (e.g., `black`) or a URL (e.g., `git+https://github.com/psf/black`).
+    ///
+    /// Note that this is not equivalent to a unique identifier for the _distribution_, as multiple
+    /// registry-based distributions (e.g., different wheels for the same package and version)
+    /// will return the same version ID, but different distribution IDs.
+    fn package_id(&self) -> PackageId {
+        match self.version_or_url() {
+            VersionOrUrl::Version(_) => PackageId::from_registry(self.name().clone()),
             VersionOrUrl::Url(url) => PackageId::from_url(url),
         }
     }
```
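Both identifiers are derived from the same `version_or_url()` on the trait above. A self-contained sketch of that derivation (string stand-ins for uv's types; `Display` formatting as in the diff):

```rust
// Simplified mirror of `DistributionMetadata::{version_id, package_id}` above.
enum VersionOrUrl<'a> {
    Version(&'a str),
    Url(&'a str),
}

fn version_id(name: &str, v: &VersionOrUrl) -> String {
    match v {
        // Version-level identity: the name *and* the version.
        VersionOrUrl::Version(version) => format!("{name}-{version}"),
        VersionOrUrl::Url(url) => (*url).to_owned(),
    }
}

fn package_id(name: &str, v: &VersionOrUrl) -> String {
    match v {
        // Package-level identity: the version is deliberately dropped.
        VersionOrUrl::Version(_) => name.to_owned(),
        VersionOrUrl::Url(url) => (*url).to_owned(),
    }
}

fn main() {
    let dist = VersionOrUrl::Version("23.10.0");
    assert_eq!(version_id("black", &dist), "black-23.10.0");
    assert_eq!(package_id("black", &dist), "black");
}
```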
```diff
@@ -112,7 +112,7 @@ pub enum MissingLibrary {
 #[derive(Debug, Error)]
 pub struct MissingHeaderCause {
     missing_library: MissingLibrary,
-    package_id: String,
+    version_id: String,
 }
 
 impl Display for MissingHeaderCause {
@@ -122,22 +122,22 @@ impl Display for MissingHeaderCause {
                 write!(
                     f,
                     "This error likely indicates that you need to install a library that provides \"{}\" for {}",
-                    header, self.package_id
+                    header, self.version_id
                 )
             }
             MissingLibrary::Linker(library) => {
                 write!(
                     f,
                     "This error likely indicates that you need to install the library that provides a shared library \
-                    for {library} for {package_id} (e.g. lib{library}-dev)",
-                    library = library, package_id = self.package_id
+                    for {library} for {version_id} (e.g. lib{library}-dev)",
+                    library = library, version_id = self.version_id
                 )
             }
             MissingLibrary::PythonPackage(package) => {
                 write!(
                     f,
-                    "This error likely indicates that you need to `uv pip install {package}` into the build environment for {package_id}",
-                    package = package, package_id = self.package_id
+                    "This error likely indicates that you need to `uv pip install {package}` into the build environment for {version_id}",
+                    package = package, version_id = self.version_id
                 )
             }
         }
@@ -148,7 +148,7 @@ impl Error {
     fn from_command_output(
         message: String,
         output: &Output,
-        package_id: impl Into<String>,
+        version_id: impl Into<String>,
     ) -> Self {
         let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
         let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
@@ -178,7 +178,7 @@ impl Error {
                 stderr,
                 missing_header_cause: MissingHeaderCause {
                     missing_library,
-                    package_id: package_id.into(),
+                    version_id: version_id.into(),
                 },
             };
         }
@@ -364,7 +364,7 @@ pub struct SourceBuild {
     /// > it created.
     metadata_directory: Option<PathBuf>,
     /// Package id such as `foo-1.2.3`, for error reporting
-    package_id: String,
+    version_id: String,
     /// Whether we do a regular PEP 517 build or an PEP 660 editable build
     build_kind: BuildKind,
     /// Modified PATH that contains the `venv_bin`, `user_path` and `system_path` variables in that order
@@ -385,7 +385,7 @@ impl SourceBuild {
         interpreter: &Interpreter,
         build_context: &impl BuildContext,
         source_build_context: SourceBuildContext,
-        package_id: String,
+        version_id: String,
         setup_py: SetupPyStrategy,
         config_settings: ConfigSettings,
         build_isolation: BuildIsolation<'_>,
@@ -477,7 +477,7 @@ impl SourceBuild {
                 &venv,
                 pep517_backend,
                 build_context,
-                &package_id,
+                &version_id,
                 build_kind,
                 &config_settings,
                 &environment_variables,
@@ -497,7 +497,7 @@ impl SourceBuild {
             build_kind,
             config_settings,
             metadata_directory: None,
-            package_id,
+            version_id,
             environment_variables,
             modified_path,
         })
@@ -695,7 +695,7 @@ impl SourceBuild {
             return Err(Error::from_command_output(
                 "Build backend failed to determine metadata through `prepare_metadata_for_build_wheel`".to_string(),
                 &output,
-                &self.package_id,
+                &self.version_id,
             ));
         }
@@ -714,7 +714,7 @@ impl SourceBuild {
     /// dir.
     ///
     /// <https://packaging.python.org/en/latest/specifications/source-distribution-format/>
-    #[instrument(skip_all, fields(package_id = self.package_id))]
+    #[instrument(skip_all, fields(version_id = self.version_id))]
     pub async fn build_wheel(&self, wheel_dir: &Path) -> Result<String, Error> {
         // The build scripts run with the extracted root as cwd, so they need the absolute path.
         let wheel_dir = fs::canonicalize(wheel_dir)?;
@@ -750,7 +750,7 @@ impl SourceBuild {
             return Err(Error::from_command_output(
                 "Failed building wheel through setup.py".to_string(),
                 &output,
-                &self.package_id,
+                &self.version_id,
             ));
         }
         let dist = fs::read_dir(self.source_tree.join("dist"))?;
@@ -761,7 +761,7 @@ impl SourceBuild {
                     "Expected exactly wheel in `dist/` after invoking setup.py, found {dist_dir:?}"
                 ),
                 &output,
-                &self.package_id)
+                &self.version_id)
             );
         };
@@ -831,7 +831,7 @@ impl SourceBuild {
                     self.build_kind
                 ),
                 &output,
-                &self.package_id,
+                &self.version_id,
             ));
         }
@@ -843,7 +843,7 @@ impl SourceBuild {
                     self.build_kind
                 ),
                 &output,
-                &self.package_id,
+                &self.version_id,
             ));
         }
         Ok(distribution_filename)
@@ -873,7 +873,7 @@ async fn create_pep517_build_environment(
     venv: &PythonEnvironment,
     pep517_backend: &Pep517Backend,
     build_context: &impl BuildContext,
-    package_id: &str,
+    version_id: &str,
     build_kind: BuildKind,
     config_settings: &ConfigSettings,
     environment_variables: &FxHashMap<OsString, OsString>,
@@ -927,7 +927,7 @@ async fn create_pep517_build_environment(
         return Err(Error::from_command_output(
             format!("Build backend failed to determine extra requires with `build_{build_kind}()`"),
             &output,
-            package_id,
+            version_id,
         ));
     }
@@ -938,7 +938,7 @@ async fn create_pep517_build_environment(
                 "Build backend failed to read extra requires from `get_requires_for_build_{build_kind}`: {err}"
            ),
            &output,
-           package_id,
+           version_id,
        )
    })?;
@@ -949,7 +949,7 @@ async fn create_pep517_build_environment(
                 "Build backend failed to return extra requires with `get_requires_for_build_{build_kind}`: {err}"
            ),
            &output,
-           package_id,
+           version_id,
        )
    })?;
```
```diff
@@ -269,12 +269,12 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         Ok(())
     }
 
-    #[instrument(skip_all, fields(package_id = package_id, subdirectory = ?subdirectory))]
+    #[instrument(skip_all, fields(version_id = version_id, subdirectory = ?subdirectory))]
     async fn setup_build<'data>(
         &'data self,
         source: &'data Path,
         subdirectory: Option<&'data Path>,
-        package_id: &'data str,
+        version_id: &'data str,
         dist: Option<&'data SourceDist>,
         build_kind: BuildKind,
     ) -> Result<SourceBuild> {
@@ -304,7 +304,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
             self.interpreter,
             self,
             self.source_build_context.clone(),
-            package_id.to_string(),
+            version_id.to_string(),
             self.setup_py,
             self.config_settings.clone(),
             self.build_isolation,
```
```diff
@@ -47,7 +47,7 @@ impl<'a> BuiltWheelIndex<'a> {
 
         // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
         let revision = pointer.into_revision();
-        if !revision.satisfies(self.hasher.get(&source_dist.name)) {
+        if !revision.satisfies(self.hasher.get(source_dist)) {
             return Ok(None);
         }
@@ -81,7 +81,7 @@ impl<'a> BuiltWheelIndex<'a> {
 
         // Enforce hash-checking by omitting any wheels that don't satisfy the required hashes.
         let revision = pointer.into_revision();
-        if !revision.satisfies(self.hasher.get(&source_dist.name)) {
+        if !revision.satisfies(self.hasher.get(source_dist)) {
             return Ok(None);
         }
@@ -91,7 +91,7 @@ impl<'a> BuiltWheelIndex<'a> {
     /// Return the most compatible [`CachedWheel`] for a given source distribution at a git URL.
     pub fn git(&self, source_dist: &GitSourceDist) -> Option<CachedWheel> {
         // Enforce hash-checking, which isn't supported for Git distributions.
-        if self.hasher.get(&source_dist.name).is_validate() {
+        if self.hasher.get(source_dist).is_validate() {
             return None;
         }
```
```diff
@@ -123,7 +123,7 @@ impl<'a> RegistryWheelIndex<'a> {
                     CachedWheel::from_http_pointer(wheel_dir.join(file), cache)
                 {
                     // Enforce hash-checking based on the built distribution.
-                    if wheel.satisfies(hasher.get(package)) {
+                    if wheel.satisfies(hasher.get_package(package)) {
                         Self::add_wheel(wheel, tags, &mut versions);
                     }
                 }
@@ -139,7 +139,7 @@ impl<'a> RegistryWheelIndex<'a> {
                     CachedWheel::from_local_pointer(wheel_dir.join(file), cache)
                 {
                     // Enforce hash-checking based on the built distribution.
-                    if wheel.satisfies(hasher.get(package)) {
+                    if wheel.satisfies(hasher.get_package(package)) {
                         Self::add_wheel(wheel, tags, &mut versions);
                     }
                 }
@@ -184,7 +184,7 @@ impl<'a> RegistryWheelIndex<'a> {
 
                 if let Some(revision) = revision {
                     // Enforce hash-checking based on the source distribution.
-                    if revision.satisfies(hasher.get(package)) {
+                    if revision.satisfies(hasher.get_package(package)) {
                         for wheel_dir in symlinks(cache_shard.join(revision.id())) {
                             if let Some(wheel) = CachedWheel::from_built_source(wheel_dir) {
                                 Self::add_wheel(wheel, tags, &mut versions);
```
```diff
@@ -8,7 +8,7 @@ use tracing::instrument;
 use url::Url;
 
 use distribution_types::{
-    BuildableSource, CachedDist, Dist, Hashed, Identifier, LocalEditable, LocalEditables, Name,
+    BuildableSource, CachedDist, Dist, Hashed, Identifier, LocalEditable, LocalEditables,
     RemoteSource,
 };
 use platform_tags::Tags;
@@ -170,7 +170,7 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
     pub async fn get_wheel(&self, dist: Dist, in_flight: &InFlight) -> Result<CachedDist, Error> {
         let id = dist.distribution_id();
         if in_flight.downloads.register(id.clone()) {
-            let policy = self.hashes.get(dist.name());
+            let policy = self.hashes.get(&dist);
             let result = self
                 .database
                 .get_or_build_wheel(&dist, self.tags, policy)
```
```diff
@@ -13,7 +13,6 @@ use distribution_types::{
 use pep508_rs::{Requirement, VersionOrUrl};
 use platform_tags::Tags;
 use uv_cache::{ArchiveTarget, ArchiveTimestamp, Cache, CacheBucket, WheelCache};
-
 use uv_configuration::{NoBinary, Reinstall};
 use uv_distribution::{
     BuiltWheelIndex, HttpArchivePointer, LocalArchivePointer, RegistryWheelIndex,
@@ -259,7 +258,7 @@ impl<'a> Planner<'a> {
                         // Read the HTTP pointer.
                         if let Some(pointer) = HttpArchivePointer::read_from(&cache_entry)? {
                             let archive = pointer.into_archive();
-                            if archive.satisfies(hasher.get(&requirement.name)) {
+                            if archive.satisfies(hasher.get(&wheel)) {
                                 let cached_dist = CachedDirectUrlDist::from_url(
                                     wheel.filename,
                                     wheel.url,
@@ -301,7 +300,7 @@ impl<'a> Planner<'a> {
                             let timestamp = ArchiveTimestamp::from_file(&wheel.path)?;
                             if pointer.is_up_to_date(timestamp) {
                                 let archive = pointer.into_archive();
-                                if archive.satisfies(hasher.get(&requirement.name)) {
+                                if archive.satisfies(hasher.get(&wheel)) {
                                     let cached_dist = CachedDirectUrlDist::from_url(
                                         wheel.filename,
                                         wheel.url,
```
```diff
@@ -5,7 +5,7 @@ use futures::stream::FuturesUnordered;
 use futures::StreamExt;
 use rustc_hash::FxHashSet;
 
-use distribution_types::{Dist, DistributionMetadata, LocalEditable, Name};
+use distribution_types::{Dist, DistributionMetadata, LocalEditable};
 use pep508_rs::{MarkerEnvironment, Requirement, VersionOrUrl};
 use pypi_types::Metadata23;
 use uv_client::RegistryClient;
@@ -138,7 +138,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
 
         // Fetch the metadata for the distribution.
         let requires_dist = {
-            let id = dist.package_id();
+            let id = dist.version_id();
             if let Some(archive) = self
                 .index
                 .get_metadata(&id)
@@ -157,7 +157,7 @@ impl<'a, Context: BuildContext + Send + Sync> LookaheadResolver<'a, Context> {
                 // Run the PEP 517 build process to extract metadata from the source distribution.
                 let archive = self
                     .database
-                    .get_or_build_wheel_metadata(&dist, self.hasher.get(dist.name()))
+                    .get_or_build_wheel_metadata(&dist, self.hasher.get(&dist))
                     .await
                     .with_context(|| match &dist {
                         Dist::Built(built) => format!("Failed to download: {built}"),
```
```diff
@@ -5,7 +5,7 @@ use anyhow::{Context, Result};
 use futures::{StreamExt, TryStreamExt};
 use url::Url;
 
-use distribution_types::{BuildableSource, HashPolicy, PackageId, PathSourceUrl, SourceUrl};
+use distribution_types::{BuildableSource, HashPolicy, PathSourceUrl, SourceUrl, VersionId};
 use pep508_rs::Requirement;
 use uv_client::RegistryClient;
 use uv_distribution::{DistributionDatabase, Reporter};
@@ -92,8 +92,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
         let hashes = match self.hasher {
             HashStrategy::None => HashPolicy::None,
             HashStrategy::Generate => HashPolicy::Generate,
-            HashStrategy::Validate(_) => {
-                // TODO(charlie): Support `--require-hashes` for unnamed requirements.
+            HashStrategy::Validate { .. } => {
                 return Err(anyhow::anyhow!(
                     "Hash-checking is not supported for local directories: {}",
                     source_tree.user_display()
@@ -103,7 +102,7 @@ impl<'a, Context: BuildContext + Send + Sync> SourceTreeResolver<'a, Context> {
 
         // Fetch the metadata for the distribution.
         let metadata = {
-            let id = PackageId::from_url(source.url());
+            let id = VersionId::from_url(source.url());
             if let Some(archive) = self
                 .index
                 .get_metadata(&id)
```
```diff
@@ -10,8 +10,8 @@ use tracing::debug;
 
 use distribution_filename::{SourceDistFilename, WheelFilename};
 use distribution_types::{
-    BuildableSource, DirectSourceUrl, GitSourceUrl, HashPolicy, PackageId, PathSourceUrl,
-    RemoteSource, SourceUrl,
+    BuildableSource, DirectSourceUrl, GitSourceUrl, PathSourceUrl, RemoteSource, SourceUrl,
+    VersionId,
 };
 use pep508_rs::{
     Requirement, RequirementsTxtRequirement, Scheme, UnnamedRequirement, VersionOrUrl,
@@ -241,7 +241,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
 
         // Fetch the metadata for the distribution.
         let name = {
-            let id = PackageId::from_url(source.url());
+            let id = VersionId::from_url(source.url());
             if let Some(archive) = index.get_metadata(&id).as_deref().and_then(|response| {
                 if let MetadataResponse::Found(archive) = response {
                     Some(archive)
@@ -252,20 +252,8 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 // If the metadata is already in the index, return it.
                 archive.metadata.name.clone()
             } else {
-                // Determine the hash policy. Since we don't have a package name, we perform a
-                // manual match.
-                let hashes = match hasher {
-                    HashStrategy::None => HashPolicy::None,
-                    HashStrategy::Generate => HashPolicy::Generate,
-                    HashStrategy::Validate(_) => {
-                        // TODO(charlie): Support `--require-hashes` for unnamed requirements.
-                        return Err(anyhow::anyhow!(
-                            "Unnamed requirements are not supported with `--require-hashes`"
-                        ));
-                    }
-                };
-
                 // Run the PEP 517 build process to extract metadata from the source distribution.
+                let hashes = hasher.get_url(source.url());
                 let source = BuildableSource::Url(source);
                 let archive = database.build_wheel_metadata(&source, hashes).await?;
```
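The hunk above removes the last hard error for unnamed requirements: instead of refusing to proceed, the resolver can now ask for a URL-keyed hash policy. A self-contained sketch of that `get_url` lookup (simplified types; in uv the URL is canonicalized and digested before keying):

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum PackageId {
    Name(String),
    Url(String),
}

#[derive(Debug, PartialEq)]
enum HashPolicy<'a> {
    None,
    Validate(&'a [String]),
}

/// Simplified `HashStrategy::Validate`: required hashes keyed by `PackageId`.
struct HashStrategy(HashMap<PackageId, Vec<String>>);

impl HashStrategy {
    /// URL-keyed lookup, usable before a package name is known.
    fn get_url(&self, url: &str) -> HashPolicy<'_> {
        self.0
            .get(&PackageId::Url(url.to_owned()))
            .map(Vec::as_slice)
            .map_or(HashPolicy::None, HashPolicy::Validate)
    }
}

fn main() {
    let mut hashes = HashMap::new();
    hashes.insert(
        PackageId::Url("https://example.org/pkg.whl".to_owned()),
        vec!["sha256:...".to_owned()],
    );
    let strategy = HashStrategy(hashes);
    assert!(matches!(
        strategy.get_url("https://example.org/pkg.whl"),
        HashPolicy::Validate(_)
    ));
    assert_eq!(strategy.get_url("https://example.org/other.whl"), HashPolicy::None);
}
```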
```diff
@@ -132,7 +132,7 @@ impl FlatIndex {
         }
 
         // Check if hashes line up
-        let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
+        let hash = if let HashPolicy::Validate(required) = hasher.get_package(&filename.name) {
             if hashes.is_empty() {
                 Hash::Missing
             } else if required.iter().any(|hash| hashes.contains(hash)) {
@@ -174,7 +174,7 @@ impl FlatIndex {
         };
 
         // Check if hashes line up
-        let hash = if let HashPolicy::Validate(required) = hasher.get(&filename.name) {
+        let hash = if let HashPolicy::Validate(required) = hasher.get_package(&filename.name) {
             if hashes.is_empty() {
                 Hash::Missing
             } else if required.iter().any(|hash| hashes.contains(hash)) {
```
```diff
@@ -12,7 +12,7 @@ use pubgrub::type_aliases::SelectedDependencies;
 use rustc_hash::{FxHashMap, FxHashSet};
 
 use distribution_types::{
-    Dist, DistributionMetadata, IndexUrl, LocalEditable, Name, PackageId, ResolvedDist, Verbatim,
+    Dist, DistributionMetadata, IndexUrl, LocalEditable, Name, ResolvedDist, Verbatim, VersionId,
     VersionOrUrl,
 };
 use once_map::OnceMap;
@@ -66,7 +66,7 @@ impl ResolutionGraph {
         selection: &SelectedDependencies<UvDependencyProvider>,
         pins: &FilePins,
         packages: &OnceMap<PackageName, VersionsResponse>,
-        distributions: &OnceMap<PackageId, MetadataResponse>,
+        distributions: &OnceMap<VersionId, MetadataResponse>,
         state: &State<UvDependencyProvider>,
         preferences: &Preferences,
         editables: Editables,
@@ -135,7 +135,7 @@ impl ResolutionGraph {
                 {
                     hashes.insert(package_name.clone(), digests.to_vec());
                 } else if let Some(metadata_response) =
-                    distributions.get(&pinned_package.package_id())
+                    distributions.get(&pinned_package.version_id())
                 {
                     if let MetadataResponse::Found(ref archive) = *metadata_response {
                         let mut digests = archive.hashes.clone();
@@ -168,17 +168,17 @@ impl ResolutionGraph {
                     });
                 }
             } else {
-                let response = distributions.get(&dist.package_id()).unwrap_or_else(|| {
+                let response = distributions.get(&dist.version_id()).unwrap_or_else(|| {
                     panic!(
                         "Every package should have metadata: {:?}",
-                        dist.package_id()
+                        dist.version_id()
                     )
                 });
 
                 let MetadataResponse::Found(archive) = &*response else {
                     panic!(
                         "Every package should have metadata: {:?}",
-                        dist.package_id()
+                        dist.version_id()
                     )
                 };
@@ -222,17 +222,17 @@ impl ResolutionGraph {
                     });
                 }
             } else {
-                let response = distributions.get(&dist.package_id()).unwrap_or_else(|| {
+                let response = distributions.get(&dist.version_id()).unwrap_or_else(|| {
                     panic!(
                         "Every package should have metadata: {:?}",
-                        dist.package_id()
+                        dist.version_id()
                     )
                 });
 
                 let MetadataResponse::Found(archive) = &*response else {
                     panic!(
                         "Every package should have metadata: {:?}",
-                        dist.package_id()
+                        dist.version_id()
                     )
                 };
@@ -429,20 +429,20 @@ impl ResolutionGraph {
         let mut seen_marker_values = FxHashSet::default();
         for i in self.petgraph.node_indices() {
             let dist = &self.petgraph[i];
-            let package_id = match dist.version_or_url() {
+            let version_id = match dist.version_or_url() {
                 VersionOrUrl::Version(version) => {
-                    PackageId::from_registry(dist.name().clone(), version.clone())
+                    VersionId::from_registry(dist.name().clone(), version.clone())
                 }
-                VersionOrUrl::Url(verbatim_url) => PackageId::from_url(verbatim_url.raw()),
+                VersionOrUrl::Url(verbatim_url) => VersionId::from_url(verbatim_url.raw()),
             };
             let res = index
                 .distributions
-                .get(&package_id)
+                .get(&version_id)
                 .expect("every package in resolution graph has metadata");
             let MetadataResponse::Found(archive, ..) = &*res else {
                 panic!(
                     "Every package should have metadata: {:?}",
-                    dist.package_id()
+                    dist.version_id()
                 )
             };
             for req in manifest.apply(&archive.metadata.requires_dist) {
```
```diff
@@ -141,7 +141,7 @@ impl BatchPrefetcher {
                 dist
             );
             prefetch_count += 1;
-            if index.distributions.register(candidate.package_id()) {
+            if index.distributions.register(candidate.version_id()) {
                 let request = match dist {
                     ResolvedDistRef::Installable(dist) => Request::Dist(dist.clone()),
                     ResolvedDistRef::Installed(dist) => Request::Installed(dist.clone()),
```
```diff
@@ -1,6 +1,6 @@
 use std::sync::Arc;
 
-use distribution_types::PackageId;
+use distribution_types::VersionId;
 use once_map::OnceMap;
 use uv_normalize::PackageName;
@@ -14,7 +14,7 @@ pub struct InMemoryIndex {
     pub(crate) packages: OnceMap<PackageName, VersionsResponse>,
 
     /// A map from package ID to metadata for that distribution.
-    pub(crate) distributions: OnceMap<PackageId, MetadataResponse>,
+    pub(crate) distributions: OnceMap<VersionId, MetadataResponse>,
 }
 
 impl InMemoryIndex {
@@ -24,8 +24,8 @@ impl InMemoryIndex {
     }
 
     /// Insert a [`Metadata23`] into the index.
-    pub fn insert_metadata(&self, package_id: PackageId, response: MetadataResponse) {
-        self.distributions.done(package_id, response);
+    pub fn insert_metadata(&self, version_id: VersionId, response: MetadataResponse) {
+        self.distributions.done(version_id, response);
     }
 
     /// Get the [`VersionsResponse`] for a given package name, without waiting.
@@ -34,7 +34,7 @@ impl InMemoryIndex {
     }
 
     /// Get the [`MetadataResponse`] for a given package ID, without waiting.
-    pub fn get_metadata(&self, package_id: &PackageId) -> Option<Arc<MetadataResponse>> {
-        self.distributions.get(package_id)
+    pub fn get_metadata(&self, version_id: &VersionId) -> Option<Arc<MetadataResponse>> {
+        self.distributions.get(version_id)
     }
 }
```
```diff
@@ -523,29 +523,27 @@ impl<
             match package {
                 PubGrubPackage::Root(_) => {}
                 PubGrubPackage::Python(_) => {}
-                PubGrubPackage::Package(package_name, _extra, None) => {
-                    // Validate that the package is permitted under hash-checking mode.
-                    if !self.hasher.allows(package_name) {
-                        return Err(ResolveError::UnhashedPackage(package_name.clone()));
+                PubGrubPackage::Package(name, _extra, None) => {
+                    // Verify that the package is allowed under the hash-checking policy.
+                    if !self.hasher.allows_package(name) {
+                        return Err(ResolveError::UnhashedPackage(name.clone()));
                     }
 
                     // Emit a request to fetch the metadata for this package.
-                    if self.index.packages.register(package_name.clone()) {
-                        priorities.add(package_name.clone());
-                        request_sink
-                            .send(Request::Package(package_name.clone()))
-                            .await?;
+                    if self.index.packages.register(name.clone()) {
+                        priorities.add(name.clone());
+                        request_sink.send(Request::Package(name.clone())).await?;
                     }
                 }
-                PubGrubPackage::Package(package_name, _extra, Some(url)) => {
-                    // Validate that the package is permitted under hash-checking mode.
-                    if !self.hasher.allows(package_name) {
-                        return Err(ResolveError::UnhashedPackage(package_name.clone()));
+                PubGrubPackage::Package(name, _extra, Some(url)) => {
+                    // Verify that the package is allowed under the hash-checking policy.
+                    if !self.hasher.allows_url(url) {
+                        return Err(ResolveError::UnhashedPackage(name.clone()));
                     }
 
                     // Emit a request to fetch the metadata for this distribution.
-                    let dist = Dist::from_url(package_name.clone(), url.clone())?;
-                    if self.index.distributions.register(dist.package_id()) {
+                    let dist = Dist::from_url(name.clone(), url.clone())?;
+                    if self.index.distributions.register(dist.version_id()) {
                         priorities.add(dist.name().clone());
                         request_sink.send(Request::Dist(dist)).await?;
                     }
```
|
||||||
let response = self
|
let response = self
|
||||||
.index
|
.index
|
||||||
.distributions
|
.distributions
|
||||||
.wait(&dist.package_id())
|
.wait(&dist.version_id())
|
||||||
.await
|
.await
|
||||||
.ok_or(ResolveError::Unregistered)?;
|
.ok_or(ResolveError::Unregistered)?;
|
||||||
|
|
||||||
|
|
@ -796,7 +794,7 @@ impl<
|
||||||
let version = candidate.version().clone();
|
let version = candidate.version().clone();
|
||||||
|
|
||||||
// Emit a request to fetch the metadata for this version.
|
// Emit a request to fetch the metadata for this version.
|
||||||
if self.index.distributions.register(candidate.package_id()) {
|
if self.index.distributions.register(candidate.version_id()) {
|
||||||
let request = match dist.for_resolution() {
|
let request = match dist.for_resolution() {
|
||||||
ResolvedDistRef::Installable(dist) => Request::Dist(dist.clone()),
|
ResolvedDistRef::Installable(dist) => Request::Dist(dist.clone()),
|
||||||
ResolvedDistRef::Installed(dist) => Request::Installed(dist.clone()),
|
ResolvedDistRef::Installed(dist) => Request::Installed(dist.clone()),
|
||||||
|
|
@ -880,13 +878,13 @@ impl<
|
||||||
Some(url) => PubGrubDistribution::from_url(package_name, url),
|
Some(url) => PubGrubDistribution::from_url(package_name, url),
|
||||||
None => PubGrubDistribution::from_registry(package_name, version),
|
None => PubGrubDistribution::from_registry(package_name, version),
|
||||||
};
|
};
|
||||||
let package_id = dist.package_id();
|
let version_id = dist.version_id();
|
||||||
|
|
||||||
// Wait for the metadata to be available.
|
// Wait for the metadata to be available.
|
||||||
self.index
|
self.index
|
||||||
.distributions
|
.distributions
|
||||||
.wait(&package_id)
|
.wait(&version_id)
|
||||||
.instrument(info_span!("distributions_wait", %package_id))
|
.instrument(info_span!("distributions_wait", %version_id))
|
||||||
.await
|
.await
|
||||||
.ok_or(ResolveError::Unregistered)?;
|
.ok_or(ResolveError::Unregistered)?;
|
||||||
}
|
}
|
||||||
|
|
@ -931,7 +929,7 @@ impl<
|
||||||
Some(url) => PubGrubDistribution::from_url(package_name, url),
|
Some(url) => PubGrubDistribution::from_url(package_name, url),
|
||||||
None => PubGrubDistribution::from_registry(package_name, version),
|
None => PubGrubDistribution::from_registry(package_name, version),
|
||||||
};
|
};
|
||||||
let package_id = dist.package_id();
|
let version_id = dist.version_id();
|
||||||
|
|
||||||
// If the package does not exist in the registry or locally, we cannot fetch its dependencies
|
// If the package does not exist in the registry or locally, we cannot fetch its dependencies
|
||||||
if self.unavailable_packages.get(package_name).is_some()
|
if self.unavailable_packages.get(package_name).is_some()
|
||||||
|
|
@ -953,8 +951,8 @@ impl<
|
||||||
let response = self
|
let response = self
|
||||||
.index
|
.index
|
||||||
.distributions
|
.distributions
|
||||||
.wait(&package_id)
|
.wait(&version_id)
|
||||||
.instrument(info_span!("distributions_wait", %package_id))
|
.instrument(info_span!("distributions_wait", %version_id))
|
||||||
.await
|
.await
|
||||||
.ok_or(ResolveError::Unregistered)?;
|
.ok_or(ResolveError::Unregistered)?;
|
||||||
|
|
||||||
|
|
@ -1061,7 +1059,7 @@ impl<
|
||||||
Some(Response::Installed { dist, metadata }) => {
|
Some(Response::Installed { dist, metadata }) => {
|
||||||
trace!("Received installed distribution metadata for: {dist}");
|
trace!("Received installed distribution metadata for: {dist}");
|
||||||
self.index.distributions.done(
|
self.index.distributions.done(
|
||||||
dist.package_id(),
|
dist.version_id(),
|
||||||
MetadataResponse::Found(ArchiveMetadata::from(metadata)),
|
MetadataResponse::Found(ArchiveMetadata::from(metadata)),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
@ -1079,7 +1077,7 @@ impl<
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
self.index.distributions.done(dist.package_id(), metadata);
|
self.index.distributions.done(dist.version_id(), metadata);
|
||||||
}
|
}
|
||||||
Some(Response::Dist {
|
Some(Response::Dist {
|
||||||
dist: Dist::Source(dist),
|
dist: Dist::Source(dist),
|
||||||
|
|
@ -1095,7 +1093,7 @@ impl<
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
self.index.distributions.done(dist.package_id(), metadata);
|
self.index.distributions.done(dist.version_id(), metadata);
|
||||||
}
|
}
|
||||||
None => {}
|
None => {}
|
||||||
}
|
}
|
||||||
|
|
@ -1200,7 +1198,7 @@ impl<
|
||||||
};
|
};
|
||||||
|
|
||||||
// Emit a request to fetch the metadata for this version.
|
// Emit a request to fetch the metadata for this version.
|
||||||
if self.index.distributions.register(candidate.package_id()) {
|
if self.index.distributions.register(candidate.version_id()) {
|
||||||
let dist = dist.for_resolution().to_owned();
|
let dist = dist.for_resolution().to_owned();
|
||||||
|
|
||||||
let response = match dist {
|
let response = match dist {
|
||||||
|
|
|
||||||
|
|
```diff
@@ -3,7 +3,7 @@ use std::future::Future;
 use anyhow::Result;
 use chrono::{DateTime, Utc};
 
-use distribution_types::{Dist, IndexLocations, Name};
+use distribution_types::{Dist, IndexLocations};
 use platform_tags::Tags;
 
 use uv_client::RegistryClient;
@@ -181,7 +181,7 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
     async fn get_or_build_wheel_metadata<'io>(&'io self, dist: &'io Dist) -> WheelMetadataResult {
         match self
             .fetcher
-            .get_or_build_wheel_metadata(dist, self.hasher.get(dist.name()))
+            .get_or_build_wheel_metadata(dist, self.hasher.get(dist))
             .await
         {
             Ok(metadata) => Ok(MetadataResponse::Found(metadata)),
```
```diff
@@ -112,7 +112,7 @@ impl VersionMap {
             .allowed_versions(package_name)
             .cloned()
             .unwrap_or_default();
-        let required_hashes = hasher.get(package_name).digests().to_vec();
+        let required_hashes = hasher.get_package(package_name).digests().to_vec();
         Self {
             inner: VersionMapInner::Lazy(VersionMapLazy {
                 map,
```
```diff
@@ -30,6 +30,7 @@ rustc-hash = { workspace = true }
 serde = { workspace = true, optional = true }
 serde_json = { workspace = true, optional = true }
 thiserror = { workspace = true }
+url = { workspace = true }
 
 [features]
 default = []
```
@ -1,8 +1,10 @@
|
||||||
use distribution_types::HashPolicy;
|
|
||||||
use rustc_hash::FxHashMap;
|
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use pep508_rs::{MarkerEnvironment, Requirement, VersionOrUrl};
|
use rustc_hash::FxHashMap;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
use distribution_types::{DistributionMetadata, HashPolicy, PackageId};
|
||||||
|
use pep508_rs::{MarkerEnvironment, RequirementsTxtRequirement, VersionOrUrl};
|
||||||
use pypi_types::{HashDigest, HashError};
|
use pypi_types::{HashDigest, HashError};
|
||||||
use uv_normalize::PackageName;
|
use uv_normalize::PackageName;
|
||||||
|
|
||||||
|
|
@ -14,74 +16,115 @@ pub enum HashStrategy {
|
||||||
Generate,
|
Generate,
|
||||||
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
|
/// Hashes should be validated against a pre-defined list of hashes. If necessary, hashes should
|
||||||
/// be generated so as to ensure that the archive is valid.
|
/// be generated so as to ensure that the archive is valid.
|
||||||
Validate(FxHashMap<PackageName, Vec<HashDigest>>),
|
Validate(FxHashMap<PackageId, Vec<HashDigest>>),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HashStrategy {
|
impl HashStrategy {
|
||||||
/// Return the [`HashPolicy`] for the given package.
|
/// Return the [`HashPolicy`] for the given distribution.
|
||||||
pub fn get(&self, package_name: &PackageName) -> HashPolicy {
|
pub fn get<T: DistributionMetadata>(&self, distribution: &T) -> HashPolicy {
|
||||||
match self {
|
match self {
|
||||||
Self::None => HashPolicy::None,
|
Self::None => HashPolicy::None,
|
||||||
Self::Generate => HashPolicy::Generate,
|
Self::Generate => HashPolicy::Generate,
|
||||||
Self::Validate(hashes) => hashes
|
Self::Validate(hashes) => hashes
|
||||||
.get(package_name)
|
.get(&distribution.package_id())
|
||||||
.map(Vec::as_slice)
|
.map(Vec::as_slice)
|
||||||
.map_or(HashPolicy::None, HashPolicy::Validate),
|
.map_or(HashPolicy::None, HashPolicy::Validate),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if the given package is allowed.
|
/// Return the [`HashPolicy`] for the given registry-based package.
|
||||||
-    pub fn allows(&self, package_name: &PackageName) -> bool {
+    pub fn get_package(&self, name: &PackageName) -> HashPolicy {
         match self {
-            Self::None => true,
-            Self::Generate => true,
-            Self::Validate(hashes) => hashes.contains_key(package_name),
+            Self::None => HashPolicy::None,
+            Self::Generate => HashPolicy::Generate,
+            Self::Validate(hashes) => hashes
+                .get(&PackageId::from_registry(name.clone()))
+                .map(Vec::as_slice)
+                .map_or(HashPolicy::None, HashPolicy::Validate),
         }
     }
 
-    /// Generate the required hashes from a set of [`Requirement`] entries.
+    /// Return the [`HashPolicy`] for the given direct URL package.
+    pub fn get_url(&self, url: &Url) -> HashPolicy {
+        match self {
+            Self::None => HashPolicy::None,
+            Self::Generate => HashPolicy::Generate,
+            Self::Validate(hashes) => hashes
+                .get(&PackageId::from_url(url))
+                .map(Vec::as_slice)
+                .map_or(HashPolicy::None, HashPolicy::Validate),
+        }
+    }
+
+    /// Returns `true` if the given registry-based package is allowed.
+    pub fn allows_package(&self, name: &PackageName) -> bool {
+        match self {
+            Self::None => true,
+            Self::Generate => true,
+            Self::Validate(hashes) => hashes.contains_key(&PackageId::from_registry(name.clone())),
+        }
+    }
+
+    /// Returns `true` if the given direct URL package is allowed.
+    pub fn allows_url(&self, url: &Url) -> bool {
+        match self {
+            Self::None => true,
+            Self::Generate => true,
+            Self::Validate(hashes) => hashes.contains_key(&PackageId::from_url(url)),
+        }
+    }
+
+    /// Generate the required hashes from a set of [`RequirementsTxtRequirement`] entries.
     pub fn from_requirements(
-        requirements: impl Iterator<Item = (Requirement, Vec<String>)>,
+        requirements: impl Iterator<Item = (RequirementsTxtRequirement, Vec<String>)>,
         markers: &MarkerEnvironment,
     ) -> Result<Self, HashStrategyError> {
-        let mut hashes = FxHashMap::<PackageName, Vec<HashDigest>>::default();
+        let mut hashes = FxHashMap::<PackageId, Vec<HashDigest>>::default();
 
         // For each requirement, map from name to allowed hashes. We use the last entry for each
         // package.
-        //
-        // For now, unnamed requirements are unsupported. This should be fine, since `--require-hashes`
-        // tends to be used after `pip-compile`, which will always output named requirements.
-        //
-        // TODO(charlie): Preserve hashes from `requirements.txt` through to this pass, so that we
-        // can iterate over requirements directly, rather than iterating over the entries.
         for (requirement, digests) in requirements {
             if !requirement.evaluate_markers(markers, &[]) {
                 continue;
             }
 
             // Every requirement must be either a pinned version or a direct URL.
-            match requirement.version_or_url.as_ref() {
-                Some(VersionOrUrl::Url(_)) => {
-                    // Direct URLs are always allowed.
-                }
-                Some(VersionOrUrl::VersionSpecifier(specifiers)) => {
-                    if specifiers
-                        .iter()
-                        .any(|specifier| matches!(specifier.operator(), pep440_rs::Operator::Equal))
-                    {
-                        // Pinned versions are allowed.
-                    } else {
-                        return Err(HashStrategyError::UnpinnedRequirement(
-                            requirement.to_string(),
-                        ));
-                    }
-                }
-                None => {
-                    return Err(HashStrategyError::UnpinnedRequirement(
-                        requirement.to_string(),
-                    ))
-                }
-            }
+            let id = match &requirement {
+                RequirementsTxtRequirement::Pep508(requirement) => {
+                    match requirement.version_or_url.as_ref() {
+                        Some(VersionOrUrl::Url(url)) => {
+                            // Direct URLs are always allowed.
+                            PackageId::from_url(url)
+                        }
+                        Some(VersionOrUrl::VersionSpecifier(specifiers)) => {
+                            // Must be a single specifier.
+                            let [specifier] = specifiers.as_ref() else {
+                                return Err(HashStrategyError::UnpinnedRequirement(
+                                    requirement.to_string(),
+                                ));
+                            };
+
+                            // Must be pinned to a specific version.
+                            if *specifier.operator() != pep440_rs::Operator::Equal {
+                                return Err(HashStrategyError::UnpinnedRequirement(
+                                    requirement.to_string(),
+                                ));
+                            }
+
+                            PackageId::from_registry(requirement.name.clone())
+                        }
+                        None => {
+                            return Err(HashStrategyError::UnpinnedRequirement(
+                                requirement.to_string(),
+                            ))
+                        }
+                    }
+                }
+                RequirementsTxtRequirement::Unnamed(requirement) => {
+                    // Direct URLs are always allowed.
+                    PackageId::from_url(&requirement.url)
+                }
+            };
 
             // Every requirement must include a hash.
             if digests.is_empty() {
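The heart of the change is visible above: hash lookups are now keyed by `PackageId` (a name *or* a canonical-URL digest) instead of by bare package name, so named and URL-only requirements share one lookup path, and `get_package`/`get_url` differ only in how they construct the key. A minimal, self-contained sketch of that pattern; the types below are simplified stand-ins for illustration, not uv's actual definitions:

```rust
use std::collections::HashMap;

// Simplified stand-ins for uv's types: a package is keyed either by its
// name or by a digest of its canonicalized URL.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum PackageId {
    Name(String),
    Url(String),
}

#[derive(Debug, PartialEq)]
enum HashPolicy<'a> {
    // No hash checking applies to this package.
    None,
    // Hashes must match one of the registered digests.
    Validate(&'a [String]),
}

struct HashStrategy {
    hashes: HashMap<PackageId, Vec<String>>,
}

impl HashStrategy {
    // Both `get_package` and `get_url` in the diff reduce to this lookup,
    // differing only in how the `PackageId` key is built.
    fn get(&self, id: &PackageId) -> HashPolicy {
        self.hashes
            .get(id)
            .map(Vec::as_slice)
            .map_or(HashPolicy::None, HashPolicy::Validate)
    }
}

fn main() {
    let mut hashes = HashMap::new();
    hashes.insert(
        PackageId::Name("anyio".to_string()),
        vec!["sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f".to_string()],
    );
    let strategy = HashStrategy { hashes };

    // A registry package with registered digests gets a `Validate` policy.
    assert!(matches!(
        strategy.get(&PackageId::Name("anyio".to_string())),
        HashPolicy::Validate(_)
    ));
    // An unregistered key falls back to `None`.
    assert_eq!(
        strategy.get(&PackageId::Url("some-url-digest".to_string())),
        HashPolicy::None
    );
}
```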
@@ -95,8 +138,7 @@ impl HashStrategy {
                 .collect::<Result<Vec<_>, _>>()
                 .unwrap();
 
-            // TODO(charlie): Extract hashes from URL fragments.
-            hashes.insert(requirement.name, digests);
+            hashes.insert(id, digests);
         }
 
         Ok(Self::Validate(hashes))
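Note the behavioral tightening in this rewrite: the old code accepted any specifier set that merely *contained* an `==` clause (so `anyio==4.0.0,>=3.0.0` passed), while the new `let [specifier] = ... else` pattern requires exactly one specifier, and that it be `==`. A standalone sketch of the stricter rule, using hypothetical simplified types in place of `pep440_rs`:

```rust
// Hypothetical simplified types: just enough to show the
// "exactly one `==` specifier" rule.
#[derive(Debug, PartialEq)]
enum Operator {
    Equal,
    GreaterThanEqual,
}

struct Specifier {
    operator: Operator,
    version: String,
}

// Returns the pinned requirement if it is acceptable under
// `--require-hashes`, or an error string otherwise.
fn require_pinned(name: &str, specifiers: &[Specifier]) -> Result<String, String> {
    // Must be a single specifier: `==4.0.0,>=3.0.0` is rejected even
    // though it contains an `==` clause.
    let [specifier] = specifiers else {
        return Err(format!("unpinned requirement: {name}"));
    };
    // And that single specifier must pin an exact version.
    if specifier.operator != Operator::Equal {
        return Err(format!("unpinned requirement: {name}"));
    }
    Ok(format!("{name}=={}", specifier.version))
}

fn main() {
    let pinned = [Specifier { operator: Operator::Equal, version: "4.0.0".into() }];
    assert_eq!(require_pinned("anyio", &pinned).unwrap(), "anyio==4.0.0");

    // A range is rejected...
    let ranged = [Specifier { operator: Operator::GreaterThanEqual, version: "4.0.0".into() }];
    assert!(require_pinned("anyio", &ranged).is_err());

    // ...and so is a pin combined with any other clause.
    let mixed = [
        Specifier { operator: Operator::Equal, version: "4.0.0".into() },
        Specifier { operator: Operator::GreaterThanEqual, version: "3.0.0".into() },
    ];
    assert!(require_pinned("anyio", &mixed).is_err());
}
```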
@@ -107,8 +149,6 @@ impl HashStrategy {
 pub enum HashStrategyError {
     #[error(transparent)]
     Hash(#[from] HashError),
-    #[error("Unnamed requirements are not supported in `--require-hashes`")]
-    UnnamedRequirement,
     #[error("In `--require-hashes` mode, all requirements must have their versions pinned with `==`, but found: {0}")]
     UnpinnedRequirement(String),
     #[error("In `--require-hashes` mode, all requirements must have a hash, but none were provided for: {0}")]
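With the `UnnamedRequirement` variant gone, an unpinned entry such as `anyio>=4.0.0` is reported via `UnpinnedRequirement`; rendered from the format string above, the output would look roughly like this (an illustrative sketch, not a captured log):

```
error: In `--require-hashes` mode, all requirements must have their versions pinned with `==`, but found: anyio>=4.0.0
```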
@@ -95,13 +95,13 @@ pub trait BuildContext: Sync {
     ///
     /// For PEP 517 builds, this calls `get_requires_for_build_wheel`.
     ///
-    /// `package_id` is for error reporting only.
+    /// `version_id` is for error reporting only.
     /// `dist` is for safety checks and may be null for editable builds.
     fn setup_build<'a>(
         &'a self,
         source: &'a Path,
         subdirectory: Option<&'a Path>,
-        package_id: &'a str,
+        version_id: &'a str,
         dist: Option<&'a SourceDist>,
         build_kind: BuildKind,
     ) -> impl Future<Output = Result<Self::SourceDistBuilder>> + Send + 'a;
@@ -15,7 +15,7 @@ use distribution_types::{
     LocalEditables, Name, Resolution,
 };
 use install_wheel_rs::linker::LinkMode;
-use pep508_rs::{MarkerEnvironment, Requirement, RequirementsTxtRequirement};
+use pep508_rs::{MarkerEnvironment, Requirement};
 use platform_tags::Tags;
 use pypi_types::{Metadata23, Yanked};
 use requirements_txt::EditableRequirement;
@@ -191,10 +191,7 @@ pub(crate) async fn pip_install(
         HashStrategy::from_requirements(
             entries
                 .into_iter()
-                .filter_map(|requirement| match requirement.requirement {
-                    RequirementsTxtRequirement::Pep508(req) => Some((req, requirement.hashes)),
-                    RequirementsTxtRequirement::Unnamed(_) => None,
-                }),
+                .map(|entry| (entry.requirement, entry.hashes)),
             markers,
         )?
     } else {
@@ -10,7 +10,7 @@ use distribution_types::{
     IndexLocations, InstalledMetadata, LocalDist, LocalEditable, LocalEditables, Name, ResolvedDist,
 };
 use install_wheel_rs::linker::LinkMode;
-use pep508_rs::RequirementsTxtRequirement;
 use platform_tags::Tags;
 use pypi_types::Yanked;
 use requirements_txt::EditableRequirement;
@@ -140,10 +140,7 @@ pub(crate) async fn pip_sync(
         HashStrategy::from_requirements(
             entries
                 .into_iter()
-                .filter_map(|requirement| match requirement.requirement {
-                    RequirementsTxtRequirement::Pep508(req) => Some((req, requirement.hashes)),
-                    RequirementsTxtRequirement::Unnamed(_) => None,
-                }),
+                .map(|entry| (entry.requirement, entry.hashes)),
             markers,
         )?
     } else {
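Since `from_requirements` now accepts `RequirementsTxtRequirement` directly, both call sites (`pip_install` above and `pip_sync` here) replace the `filter_map` that silently dropped unnamed requirements with a plain `map`, so URL-only entries keep their hashes. A small sketch of that simplification, with hypothetical entry types standing in for the real ones:

```rust
// Hypothetical stand-ins for the parsed requirements.txt entry types.
enum RequirementsTxtRequirement {
    Pep508(String),
    Unnamed(String),
}

struct Entry {
    requirement: RequirementsTxtRequirement,
    hashes: Vec<String>,
}

fn main() {
    let entries = vec![
        Entry {
            requirement: RequirementsTxtRequirement::Pep508("anyio==4.0.0".to_string()),
            hashes: vec!["sha256:<digest>".to_string()],
        },
        Entry {
            requirement: RequirementsTxtRequirement::Unnamed("https://example.com/pkg.whl".to_string()),
            hashes: vec!["sha256:<digest>".to_string()],
        },
    ];

    // Before: a `filter_map` discarded `Unnamed` entries, so their hashes
    // never reached the strategy. After: every entry flows through unchanged.
    let pairs: Vec<(RequirementsTxtRequirement, Vec<String>)> = entries
        .into_iter()
        .map(|entry| (entry.requirement, entry.hashes))
        .collect();

    assert_eq!(pairs.len(), 2);
}
```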
@@ -3855,24 +3855,29 @@ fn require_hashes_source_path_mismatch() -> Result<()> {
     Ok(())
 }
 
-/// `--require-hashes` isn't supported for unnamed requirements (yet).
+/// We allow `--require-hashes` for direct URL dependencies.
 #[test]
 fn require_hashes_unnamed() -> Result<()> {
     let context = TestContext::new("3.12");
 
     let requirements_txt = context.temp_dir.child("requirements.txt");
     requirements_txt
-        .write_str("https://foo.com --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f")?;
+        .write_str(indoc::indoc! {r"
+            https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f
+        "})?;
 
     uv_snapshot!(command(&context)
         .arg("requirements.txt")
         .arg("--require-hashes"), @r###"
-    success: false
-    exit_code: 2
+    success: true
+    exit_code: 0
     ----- stdout -----
 
     ----- stderr -----
-    error: Unnamed requirements are not supported with `--require-hashes`
+    Resolved 1 package in [TIME]
+    Downloaded 1 package in [TIME]
+    Installed 1 package in [TIME]
+     + anyio==4.0.0 (from https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl)
     "###
     );
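The updated snapshot shows a hashed, URL-only requirement resolving and installing successfully. Outside the test harness, one way to exercise the same flow (assuming the requirements file written above) would be something like:

```
uv pip install --require-hashes -r requirements.txt
```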