Add conversion from lockfile Distribution to Metadata (#4706)

## Summary

Splitting this out from https://github.com/astral-sh/uv/pull/4495
because it's also useful to reuse the `uv pip tree` code for `uv tree`.
Ibraheem Ahmed 2024-07-02 14:03:20 -04:00 committed by GitHub
parent 4b19319485
commit a380e8e4df
2 changed files with 176 additions and 28 deletions
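
As a rough sketch of the intended consumer (a hypothetical call site, not part of this commit): once each locked `Distribution` can be converted, a lockfile's distributions can be mapped straight into the `Metadata` that tree-style resolution code expects.

```rust
use std::path::Path;

// Illustrative only: `Distribution`, `Metadata`, and `LockError` are the types
// touched in the diffs below; this wrapper function is an assumption, not uv API.
fn lockfile_metadata(
    distributions: Vec<Distribution>,
    workspace_root: &Path,
) -> Result<Vec<Metadata>, LockError> {
    distributions
        .into_iter()
        .map(|dist| dist.into_metadata(workspace_root))
        .collect()
}
```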


@@ -253,6 +253,12 @@ impl RequirementSource {
         }
     }
 
+    /// Construct a [`RequirementSource`] for a URL source, given a URL parsed into components.
+    pub fn from_verbatim_parsed_url(parsed_url: ParsedUrl) -> Self {
+        let verbatim_url = VerbatimUrl::from_url(Url::from(parsed_url.clone()));
+        RequirementSource::from_parsed_url(parsed_url, verbatim_url)
+    }
+
     /// Convert the source to a [`VerbatimParsedUrl`], if it's a URL source.
     pub fn to_verbatim_parsed_url(&self) -> Option<VerbatimParsedUrl> {
         match &self {
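
The first changed file adds a small constructor on `RequirementSource`. A minimal usage sketch, mirroring how the second file calls it (the surrounding function is illustrative, not from the commit):

```rust
use std::path::PathBuf;
use url::Url;

// Build a `RequirementSource` for a direct archive URL via the new helper.
// `ParsedUrl`, `ParsedArchiveUrl`, and `RequirementSource` are the types shown
// in the diffs; field names follow the usage in the second file below.
fn archive_source(url: Url, subdirectory: Option<PathBuf>) -> RequirementSource {
    let parsed_url = ParsedUrl::Archive(ParsedArchiveUrl { url, subdirectory });
    RequirementSource::from_verbatim_parsed_url(parsed_url)
}
```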


@@ -19,12 +19,16 @@ use distribution_types::{
     GitSourceDist, IndexUrl, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
     RegistrySourceDist, RemoteSource, Resolution, ResolvedDist, ToUrlError,
 };
-use pep440_rs::Version;
-use pep508_rs::{MarkerEnvironment, MarkerTree, VerbatimUrl, VerbatimUrlError};
+use pep440_rs::{Version, VersionSpecifiers};
+use pep508_rs::{
+    ExtraOperator, MarkerEnvironment, MarkerExpression, MarkerTree, VerbatimUrl, VerbatimUrlError,
+};
 use platform_tags::{TagCompatibility, TagPriority, Tags};
-use pypi_types::{HashDigest, ParsedArchiveUrl, ParsedGitUrl};
+use pypi_types::{
+    HashDigest, ParsedArchiveUrl, ParsedGitUrl, ParsedUrl, Requirement, RequirementSource,
+};
 use uv_configuration::ExtrasSpecification;
-use uv_distribution::VirtualProject;
+use uv_distribution::{Metadata, VirtualProject};
 use uv_git::{GitReference, GitSha, RepositoryReference, ResolvedRepositoryReference};
 use uv_normalize::{ExtraName, GroupName, PackageName};
@@ -573,12 +577,7 @@ impl Distribution {
                 let filename: WheelFilename = self.wheels[best_wheel_index].filename.clone();
                 let path_dist = PathBuiltDist {
                     filename,
-                    url: VerbatimUrl::from_path(workspace_root.join(path)).map_err(|err| {
-                        LockErrorKind::VerbatimUrl {
-                            id: self.id.clone(),
-                            err,
-                        }
-                    })?,
+                    url: verbatim_url(workspace_root.join(path), &self.id)?,
                     path: path.clone(),
                 };
                 let built_dist = BuiltDist::Path(path_dist);
@@ -620,12 +619,7 @@ impl Distribution {
             Source::Path(path) => {
                 let path_dist = PathSourceDist {
                     name: self.id.name.clone(),
-                    url: VerbatimUrl::from_path(workspace_root.join(path)).map_err(|err| {
-                        LockErrorKind::VerbatimUrl {
-                            id: self.id.clone(),
-                            err,
-                        }
-                    })?,
+                    url: verbatim_url(workspace_root.join(path), &self.id)?,
                     install_path: workspace_root.join(path),
                     lock_path: path.clone(),
                 };
@@ -635,12 +629,7 @@ impl Distribution {
             Source::Directory(path) => {
                 let dir_dist = DirectorySourceDist {
                     name: self.id.name.clone(),
-                    url: VerbatimUrl::from_path(workspace_root.join(path)).map_err(|err| {
-                        LockErrorKind::VerbatimUrl {
-                            id: self.id.clone(),
-                            err,
-                        }
-                    })?,
+                    url: verbatim_url(workspace_root.join(path), &self.id)?,
                     install_path: workspace_root.join(path),
                     lock_path: path.clone(),
                     editable: false,
@@ -651,12 +640,7 @@ impl Distribution {
             Source::Editable(path) => {
                 let dir_dist = DirectorySourceDist {
                     name: self.id.name.clone(),
-                    url: VerbatimUrl::from_path(workspace_root.join(path)).map_err(|err| {
-                        LockErrorKind::VerbatimUrl {
-                            id: self.id.clone(),
-                            err,
-                        }
-                    })?,
+                    url: verbatim_url(workspace_root.join(path), &self.id)?,
                     install_path: workspace_root.join(path),
                     lock_path: path.clone(),
                     editable: true,
@@ -740,6 +724,84 @@ impl Distribution {
             .into())
     }
 
+    /// Convert the [`Distribution`] to [`Metadata`] that can be used for resolution.
+    pub fn into_metadata(self, workspace_root: &Path) -> Result<Metadata, LockError> {
+        let name = self.name().clone();
+        let version = self.id.version.clone();
+        let provides_extras = self.optional_dependencies.keys().cloned().collect();
+
+        let mut dependency_extras = FxHashMap::default();
+        let mut requires_dist = self
+            .dependencies
+            .into_iter()
+            .filter_map(|dep| {
+                dep.into_requirement(workspace_root, &mut dependency_extras)
+                    .transpose()
+            })
+            .collect::<Result<Vec<_>, LockError>>()?;
+
+        // Denormalize optional dependencies.
+        for (extra, deps) in self.optional_dependencies {
+            for dep in deps {
+                if let Some(mut dep) =
+                    dep.into_requirement(workspace_root, &mut dependency_extras)?
+                {
+                    // Add back the extra marker expression.
+                    let marker = MarkerTree::Expression(MarkerExpression::Extra {
+                        operator: ExtraOperator::Equal,
+                        name: extra.clone(),
+                    });
+                    match dep.marker {
+                        Some(ref mut tree) => tree.and(marker),
+                        None => dep.marker = Some(marker),
+                    }
+
+                    requires_dist.push(dep);
+                }
+            }
+        }
+
+        // Denormalize extras for each dependency.
+        for req in &mut requires_dist {
+            if let Some(extras) = dependency_extras.remove(&req.name) {
+                req.extras = extras;
+            }
+        }
+
+        let dev_dependencies = self
+            .dev_dependencies
+            .into_iter()
+            .map(|(group, deps)| {
+                let mut dependency_extras = FxHashMap::default();
+                let mut deps = deps
+                    .into_iter()
+                    .filter_map(|dep| {
+                        dep.into_requirement(workspace_root, &mut dependency_extras)
+                            .transpose()
+                    })
+                    .collect::<Result<Vec<_>, LockError>>()?;
+
+                // Denormalize extras for each development dependency.
+                for dep in &mut deps {
+                    if let Some(extras) = dependency_extras.remove(&dep.name) {
+                        dep.extras = extras;
+                    }
+                }
+
+                Ok((group, deps))
+            })
+            .collect::<Result<_, LockError>>()?;
+
+        Ok(Metadata {
+            name,
+            version,
+            requires_dist,
+            dev_dependencies,
+            provides_extras,
+            requires_python: None,
+        })
+    }
+
     fn to_toml(&self, dist_count_by_name: &FxHashMap<PackageName, u64>) -> anyhow::Result<Table> {
         let mut table = Table::new();
@@ -836,6 +898,16 @@ impl Distribution {
     }
 }
 
+/// Attempts to construct a `VerbatimUrl` from the given `Path`.
+fn verbatim_url(path: PathBuf, id: &DistributionId) -> Result<VerbatimUrl, LockError> {
+    let url = VerbatimUrl::from_path(path).map_err(|err| LockErrorKind::VerbatimUrl {
+        id: id.clone(),
+        err,
+    })?;
+
+    Ok(url)
+}
+
 #[derive(Clone, Debug, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 struct DistributionWire {
@@ -1813,6 +1885,76 @@ impl Dependency {
         }
     }
 
+    /// Convert the [`Dependency`] to a [`Requirement`] that can be used for resolution.
+    pub(crate) fn into_requirement(
+        self,
+        workspace_root: &Path,
+        extras: &mut FxHashMap<PackageName, Vec<ExtraName>>,
+    ) -> Result<Option<Requirement>, LockError> {
+        // Keep track of extras, these will be denormalized later.
+        if let Some(extra) = self.extra {
+            extras
+                .entry(self.distribution_id.name)
+                .or_default()
+                .push(extra);
+            return Ok(None);
+        }
+
+        // Reconstruct the `RequirementSource` from the `Source`.
+        let source = match self.distribution_id.source {
+            Source::Registry(_) => RequirementSource::Registry {
+                specifier: VersionSpecifiers::empty(),
+                index: None,
+            },
+            Source::Git(repository, git) => {
+                let git_url =
+                    uv_git::GitUrl::new(repository.clone(), GitReference::from(git.kind.clone()))
+                        .with_precise(git.precise);
+
+                let parsed_url = ParsedUrl::Git(ParsedGitUrl {
+                    url: git_url.clone(),
+                    subdirectory: git.subdirectory.as_ref().map(PathBuf::from),
+                });
+                RequirementSource::from_verbatim_parsed_url(parsed_url)
+            }
+            Source::Direct(url, direct) => {
+                let parsed_url = ParsedUrl::Archive(ParsedArchiveUrl {
+                    url: url.clone(),
+                    subdirectory: direct.subdirectory.as_ref().map(PathBuf::from),
+                });
+                RequirementSource::from_verbatim_parsed_url(parsed_url)
+            }
+            Source::Path(ref path) => RequirementSource::Path {
+                lock_path: path.clone(),
+                install_path: workspace_root.join(path),
+                url: verbatim_url(workspace_root.join(path), &self.distribution_id)?,
+            },
+            Source::Directory(ref path) => RequirementSource::Directory {
+                editable: false,
+                lock_path: path.clone(),
+                install_path: workspace_root.join(path),
+                url: verbatim_url(workspace_root.join(path), &self.distribution_id)?,
+            },
+            Source::Editable(ref path) => RequirementSource::Directory {
+                editable: true,
+                lock_path: path.clone(),
+                install_path: workspace_root.join(path),
+                url: verbatim_url(workspace_root.join(path), &self.distribution_id)?,
+            },
+        };
+
+        let requirement = Requirement {
+            name: self.distribution_id.name.clone(),
+            marker: self.marker,
+            origin: None,
+            extras: Vec::new(),
+            source,
+        };
+
+        Ok(Some(requirement))
+    }
+
     /// Returns the TOML representation of this dependency.
     fn to_toml(&self, dist_count_by_name: &FxHashMap<PackageName, u64>) -> Table {
         let mut table = Table::new();
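
A design note on `into_requirement`: a lockfile entry like `pkg[extra]` yields `Ok(None)` and records the extra in a side map, which the callers later fold back onto the plain `pkg` requirement. A toy sketch of that fold, using illustrative types rather than uv's:

```rust
use rustc_hash::FxHashMap;

// Toy illustration (not uv code): extras are first accumulated per package
// name from the `pkg[extra]` entries, then merged back onto the single
// requirement recorded for that package.
fn fold_extras(
    mut requirements: Vec<(String, Vec<String>)>, // (name, extras) pairs
    mut pending: FxHashMap<String, Vec<String>>,  // extras collected from `pkg[extra]` entries
) -> Vec<(String, Vec<String>)> {
    for (name, extras) in &mut requirements {
        if let Some(found) = pending.remove(name) {
            extras.extend(found);
        }
    }
    requirements
}
```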