Avoid building packages with dynamic versions (#4058)

## Summary

This PR separates gathering a package's requirements from gathering the rest
of its metadata (e.g., the version), which isn't needed when resolving a
package's _dependencies_ (as opposed to installing the package itself).
As a result, `uv pip compile` no longer needs to build a package whose
`pyproject.toml` declares its dependencies statically, even when other
fields (such as `version`) are dynamic.
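
The check itself is cheap: if neither `dependencies` nor `optional-dependencies` appears in `project.dynamic`, the requirements can be read straight from `pyproject.toml` without invoking the build backend. A minimal standalone sketch of the idea (illustrative names only, not uv's actual types):

```rust
// Sketch: extract requirements statically, bailing out only when the fields
// we need are dynamic. A dynamic `version` is fine, since the package itself
// is never installed. Assumes the `serde` (with `derive`) and `toml` crates.
use serde::Deserialize;

#[derive(Deserialize)]
struct PyProjectToml {
    project: Option<Project>,
}

#[derive(Deserialize)]
struct Project {
    #[serde(default)]
    dynamic: Vec<String>,
    #[serde(default)]
    dependencies: Vec<String>,
}

fn static_requirements(contents: &str) -> Result<Vec<String>, String> {
    let pyproject: PyProjectToml = toml::from_str(contents).map_err(|err| err.to_string())?;
    let project = pyproject.project.ok_or("no `[project]` table")?;
    if project
        .dynamic
        .iter()
        .any(|field| field == "dependencies" || field == "optional-dependencies")
    {
        // Can't know the requirements without running the build backend.
        return Err("requirements are dynamic; a build is still required".to_string());
    }
    Ok(project.dependencies)
}
```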

Closes https://github.com/astral-sh/uv/issues/4040.
Commit 191f9556b7 (parent a0173760f1), authored by Charlie Marsh on 2024-06-05 14:11:58 -04:00 and committed via GitHub.
8 changed files with 609 additions and 382 deletions


@@ -327,6 +327,73 @@ fn parse_version(metadata_version: &str) -> Result<(u8, u8), MetadataError> {
Ok((major, minor))
}
/// Python Package Metadata 2.3 as specified in
/// <https://packaging.python.org/specifications/core-metadata/>.
///
/// This is a subset of [`Metadata23`]; specifically, it omits the `version` and `requires-python`
/// fields, which aren't necessary when extracting the requirements of a package without installing
/// the package itself.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct RequiresDist {
pub name: PackageName,
pub requires_dist: Vec<Requirement<VerbatimParsedUrl>>,
pub provides_extras: Vec<ExtraName>,
}
impl RequiresDist {
/// Extract the [`RequiresDist`] from a `pyproject.toml` file, as specified in PEP 621.
pub fn parse_pyproject_toml(contents: &str) -> Result<Self, MetadataError> {
let pyproject_toml: PyProjectToml = toml::from_str(contents)?;
let project = pyproject_toml
.project
.ok_or(MetadataError::FieldNotFound("project"))?;
// If any of the fields we need were declared as dynamic, we can't use the `pyproject.toml`
// file.
let dynamic = project.dynamic.unwrap_or_default();
for field in dynamic {
match field.as_str() {
"dependencies" => return Err(MetadataError::DynamicField("dependencies")),
"optional-dependencies" => {
return Err(MetadataError::DynamicField("optional-dependencies"))
}
_ => (),
}
}
let name = project.name;
// Extract the requirements.
let mut requires_dist = project
.dependencies
.unwrap_or_default()
.into_iter()
.map(Requirement::from)
.collect::<Vec<_>>();
// Extract the optional dependencies.
let mut provides_extras: Vec<ExtraName> = Vec::new();
for (extra, requirements) in project.optional_dependencies.unwrap_or_default() {
requires_dist.extend(
requirements
.into_iter()
.map(Requirement::from)
.map(|requirement| requirement.with_extra_marker(&extra))
.collect::<Vec<_>>(),
);
provides_extras.push(extra);
}
Ok(Self {
name,
requires_dist,
provides_extras,
})
}
}
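
A hedged, test-style sketch of the new entry point's behavior (mirroring the logic above; this test is illustrative and not part of the commit):

#[test]
fn requires_dist_ignores_dynamic_version() {
    // A dynamic `version` is fine: `RequiresDist` never reads it.
    let contents = indoc::indoc! {r#"
        [project]
        name = "project"
        dynamic = ["version"]
        dependencies = ["anyio==3.7.0"]
    "#};
    let requires_dist = RequiresDist::parse_pyproject_toml(contents).unwrap();
    assert_eq!(requires_dist.requires_dist.len(), 1);

    // Dynamic dependencies, by contrast, cannot be statically extracted.
    let contents = indoc::indoc! {r#"
        [project]
        name = "project"
        dynamic = ["dependencies"]
    "#};
    assert!(matches!(
        RequiresDist::parse_pyproject_toml(contents),
        Err(MetadataError::DynamicField("dependencies"))
    ));
}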
/// The headers of a distribution metadata file.
#[derive(Debug)]
struct Headers<'a>(Vec<mailparse::MailHeader<'a>>);


@@ -34,7 +34,7 @@ use crate::archive::Archive;
use crate::locks::Locks;
use crate::metadata::{ArchiveMetadata, Metadata};
use crate::source::SourceDistributionBuilder;
use crate::{Error, LocalWheel, Reporter};
use crate::{Error, LocalWheel, Reporter, RequiresDist};
/// A cached high-level interface to convert distributions (a requirement resolved to a location)
/// to a wheel or wheel metadata.
@@ -434,6 +434,11 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
Ok(metadata)
}
/// Return the [`RequiresDist`] from a `pyproject.toml`, if it can be statically extracted.
pub async fn requires_dist(&self, project_root: &Path) -> Result<RequiresDist, Error> {
self.builder.requires_dist(project_root).await
}
/// Stream a wheel from a URL, unzipping it into the cache as it's downloaded.
async fn stream_wheel(
&self,


@@ -2,7 +2,7 @@ pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalA
pub use download::LocalWheel;
pub use error::Error;
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
pub use metadata::{ArchiveMetadata, Metadata};
pub use metadata::{ArchiveMetadata, Metadata, RequiresDist};
pub use reporter::Reporter;
pub use workspace::{ProjectWorkspace, Workspace, WorkspaceError, WorkspaceMember};


@@ -1,17 +1,18 @@
mod lowering;
use std::collections::BTreeMap;
use std::path::Path;
use thiserror::Error;
use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::{HashDigest, Metadata23};
pub use requires_dist::RequiresDist;
use uv_configuration::PreviewMode;
use uv_normalize::{ExtraName, PackageName};
use crate::metadata::lowering::{lower_requirement, LoweringError};
use crate::{ProjectWorkspace, WorkspaceError};
use crate::metadata::lowering::LoweringError;
use crate::WorkspaceError;
mod lowering;
mod requires_dist;
#[derive(Debug, Error)]
pub enum MetadataLoweringError {
@@ -56,55 +57,29 @@ impl Metadata {
project_root: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(project_root, None).await?
else {
return Ok(Self::from_metadata23(metadata));
};
Self::from_project_workspace(metadata, &project_workspace, preview_mode)
}
fn from_project_workspace(
metadata: Metadata23,
project_workspace: &ProjectWorkspace,
preview_mode: PreviewMode,
) -> Result<Metadata, MetadataLoweringError> {
let empty = BTreeMap::default();
let sources = project_workspace
.current_project()
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.unwrap_or(&empty);
let requires_dist = metadata
.requires_dist
.into_iter()
.map(|requirement| {
let requirement_name = requirement.name.clone();
lower_requirement(
requirement,
&metadata.name,
project_workspace.project_root(),
sources,
project_workspace.workspace(),
// Lower the requirements.
let RequiresDist {
name,
requires_dist,
provides_extras,
} = RequiresDist::from_workspace(
pypi_types::RequiresDist {
name: metadata.name,
requires_dist: metadata.requires_dist,
provides_extras: metadata.provides_extras,
},
project_root,
preview_mode,
)
.map_err(|err| MetadataLoweringError::LoweringError(requirement_name.clone(), err))
})
.collect::<Result<_, _>>()?;
.await?;
// Combine with the remaining metadata.
Ok(Self {
name: metadata.name,
name,
version: metadata.version,
requires_dist,
requires_python: metadata.requires_python,
provides_extras: metadata.provides_extras,
provides_extras,
})
}
}
@@ -137,265 +112,3 @@ impl From<Metadata> for ArchiveMetadata {
}
}
}
#[cfg(test)]
mod test {
use anyhow::Context;
use std::path::Path;
use indoc::indoc;
use insta::assert_snapshot;
use pypi_types::Metadata23;
use uv_configuration::PreviewMode;
use crate::metadata::Metadata;
use crate::pyproject::PyProjectToml;
use crate::ProjectWorkspace;
async fn metadata_from_pyproject_toml(contents: &str) -> anyhow::Result<Metadata> {
let pyproject_toml: PyProjectToml = toml::from_str(contents)?;
let path = Path::new("pyproject.toml");
let project_workspace = ProjectWorkspace::from_project(
path,
pyproject_toml
.project
.as_ref()
.context("metadata field project not found")?,
&pyproject_toml,
Some(path),
)
.await?;
let metadata = Metadata23::parse_pyproject_toml(contents)?;
Ok(Metadata::from_project_workspace(
metadata,
&project_workspace,
PreviewMode::Enabled,
)?)
}
async fn format_err(input: &str) -> String {
let err = metadata_from_pyproject_toml(input).await.unwrap_err();
let mut causes = err.chain();
let mut message = String::new();
message.push_str(&format!("error: {}\n", causes.next().unwrap()));
for err in causes {
message.push_str(&format!(" Caused by: {err}\n"));
}
message
}
#[tokio::test]
async fn conflict_project_and_sources() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[tokio::test]
async fn too_many_git_specs() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can only specify one of: `rev`, `tag`, or `branch`
"###);
}
#[tokio::test]
async fn too_many_git_typo() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn you_cant_mix_those() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { path = "tqdm", index = "torch" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { path = "tqdm", index = "torch" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn missing_constraint() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
"#};
assert!(metadata_from_pyproject_toml(input).await.is_ok());
}
#[tokio::test]
async fn invalid_syntax() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 16
|
8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
| ^
invalid string
expected `"`, `'`
"###);
}
#[tokio::test]
async fn invalid_url() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = "§invalid#+#*Ä" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { url = "§invalid#+#*Ä" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn workspace_and_url_spec() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[tokio::test]
async fn missing_workspace_package() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Package is not included as workspace package in `tool.uv.workspace`
"###);
}
#[tokio::test]
async fn cant_be_dynamic() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dynamic = [
"dependencies"
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: The following field was marked as dynamic: dependencies
"###);
}
#[tokio::test]
async fn missing_project_section() {
let input = indoc! {"
[tool.uv.sources]
tqdm = { workspace = true }
"};
assert_snapshot!(format_err(input).await, @r###"
error: metadata field project not found
"###);
}
}


@@ -0,0 +1,359 @@
use std::collections::BTreeMap;
use std::path::Path;
use uv_configuration::PreviewMode;
use uv_normalize::{ExtraName, PackageName};
use crate::metadata::lowering::lower_requirement;
use crate::metadata::MetadataLoweringError;
use crate::{Metadata, ProjectWorkspace};
#[derive(Debug, Clone)]
pub struct RequiresDist {
pub name: PackageName,
pub requires_dist: Vec<pypi_types::Requirement>,
pub provides_extras: Vec<ExtraName>,
}
impl RequiresDist {
/// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
/// dependencies.
pub fn from_metadata23(metadata: pypi_types::RequiresDist) -> Self {
Self {
name: metadata.name,
requires_dist: metadata
.requires_dist
.into_iter()
.map(pypi_types::Requirement::from)
.collect(),
provides_extras: metadata.provides_extras,
}
}
/// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
/// dependencies.
pub async fn from_workspace(
metadata: pypi_types::RequiresDist,
project_root: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(project_root, None).await?
else {
return Ok(Self::from_metadata23(metadata));
};
Self::from_project_workspace(metadata, &project_workspace, preview_mode)
}
pub fn from_project_workspace(
metadata: pypi_types::RequiresDist,
project_workspace: &ProjectWorkspace,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
let empty = BTreeMap::default();
let sources = project_workspace
.current_project()
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.unwrap_or(&empty);
let requires_dist = metadata
.requires_dist
.into_iter()
.map(|requirement| {
let requirement_name = requirement.name.clone();
lower_requirement(
requirement,
&metadata.name,
project_workspace.project_root(),
sources,
project_workspace.workspace(),
preview_mode,
)
.map_err(|err| MetadataLoweringError::LoweringError(requirement_name.clone(), err))
})
.collect::<Result<_, _>>()?;
Ok(Self {
name: metadata.name,
requires_dist,
provides_extras: metadata.provides_extras,
})
}
}
impl From<Metadata> for RequiresDist {
fn from(metadata: Metadata) -> Self {
Self {
name: metadata.name,
requires_dist: metadata.requires_dist,
provides_extras: metadata.provides_extras,
}
}
}
#[cfg(test)]
mod test {
use std::path::Path;
use anyhow::Context;
use indoc::indoc;
use insta::assert_snapshot;
use uv_configuration::PreviewMode;
use crate::pyproject::PyProjectToml;
use crate::{ProjectWorkspace, RequiresDist};
async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
let pyproject_toml: PyProjectToml = toml::from_str(contents)?;
let path = Path::new("pyproject.toml");
let project_workspace = ProjectWorkspace::from_project(
path,
pyproject_toml
.project
.as_ref()
.context("metadata field project not found")?,
&pyproject_toml,
Some(path),
)
.await?;
let requires_dist = pypi_types::RequiresDist::parse_pyproject_toml(contents)?;
Ok(RequiresDist::from_project_workspace(
requires_dist,
&project_workspace,
PreviewMode::Enabled,
)?)
}
async fn format_err(input: &str) -> String {
let err = requires_dist_from_pyproject_toml(input).await.unwrap_err();
let mut causes = err.chain();
let mut message = String::new();
message.push_str(&format!("error: {}\n", causes.next().unwrap()));
for err in causes {
message.push_str(&format!(" Caused by: {err}\n"));
}
message
}
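
Most of the tests below exercise error paths; as a complement, a hedged sketch of a successful lowering (assuming, as the error cases suggest, that a bare requirement plus a single Git source lowers cleanly under preview mode):

#[tokio::test]
async fn git_source_ok() {
    // The bare `tqdm` requirement is lowered to a Git source.
    let input = indoc! {r#"
        [project]
        name = "foo"
        version = "0.0.0"
        dependencies = [
          "tqdm",
        ]
        [tool.uv.sources]
        tqdm = { git = "https://github.com/tqdm/tqdm" }
    "#};
    let requires_dist = requires_dist_from_pyproject_toml(input).await.unwrap();
    assert_eq!(requires_dist.requires_dist.len(), 1);
}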
#[tokio::test]
async fn conflict_project_and_sources() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[tokio::test]
async fn too_many_git_specs() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can only specify one of: `rev`, `tag`, or `branch`
"###);
}
#[tokio::test]
async fn too_many_git_typo() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn you_cant_mix_those() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { path = "tqdm", index = "torch" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { path = "tqdm", index = "torch" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn missing_constraint() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
"#};
assert!(requires_dist_from_pyproject_toml(input).await.is_ok());
}
#[tokio::test]
async fn invalid_syntax() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 16
|
8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
| ^
invalid string
expected `"`, `'`
"###);
}
#[tokio::test]
async fn invalid_url() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = "§invalid#+#*Ä" }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: TOML parse error at line 8, column 8
|
8 | tqdm = { url = "§invalid#+#*Ä" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[tokio::test]
async fn workspace_and_url_spec() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[tokio::test]
async fn missing_workspace_package() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: Failed to parse entry for: `tqdm`
Caused by: Package is not included as workspace package in `tool.uv.workspace`
"###);
}
#[tokio::test]
async fn cant_be_dynamic() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dynamic = [
"dependencies"
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input).await, @r###"
error: The following field was marked as dynamic: dependencies
"###);
}
#[tokio::test]
async fn missing_project_section() {
let input = indoc! {"
[tool.uv.sources]
tqdm = { workspace = true }
"};
assert_snapshot!(format_err(input).await, @r###"
error: metadata field project not found
"###);
}
}


@@ -39,7 +39,7 @@ use crate::metadata::{ArchiveMetadata, Metadata};
use crate::reporter::Facade;
use crate::source::built_wheel_metadata::BuiltWheelMetadata;
use crate::source::revision::Revision;
use crate::Reporter;
use crate::{Reporter, RequiresDist};
mod built_wheel_metadata;
mod revision;
@@ -385,6 +385,14 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
Ok(metadata)
}
/// Return the [`RequiresDist`] from a `pyproject.toml`, if it can be statically extracted.
pub(crate) async fn requires_dist(&self, project_root: &Path) -> Result<RequiresDist, Error> {
let requires_dist = read_requires_dist(project_root).await?;
let requires_dist =
RequiresDist::from_workspace(requires_dist, project_root, self.preview_mode).await?;
Ok(requires_dist)
}
/// Build a source distribution from a remote URL.
#[allow(clippy::too_many_arguments)]
async fn url<'data>(
@@ -1625,6 +1633,25 @@ async fn read_pyproject_toml(
Ok(metadata)
}
/// Return the [`pypi_types::RequiresDist`] from a `pyproject.toml`, if it can be statically extracted.
async fn read_requires_dist(project_root: &Path) -> Result<pypi_types::RequiresDist, Error> {
// Read the `pyproject.toml` file.
let pyproject_toml = project_root.join("pyproject.toml");
let content = match fs::read_to_string(pyproject_toml).await {
Ok(content) => content,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
return Err(Error::MissingPyprojectToml);
}
Err(err) => return Err(Error::CacheRead(err)),
};
// Parse the metadata.
let requires_dist = pypi_types::RequiresDist::parse_pyproject_toml(&content)
.map_err(Error::DynamicPyprojectToml)?;
Ok(requires_dist)
}
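
The two error variants let callers distinguish "no `pyproject.toml` at all" from "present, but not statically readable"; a hedged sketch of a hypothetical caller that treats both as a cue to fall back to a build:

// Hypothetical caller (not part of this commit): a missing or non-static
// `pyproject.toml` triggers the PEP 517 fallback; anything else is a real failure.
match read_requires_dist(project_root).await {
    Ok(requires_dist) => { /* lower via `RequiresDist::from_workspace` */ }
    Err(Error::MissingPyprojectToml | Error::DynamicPyprojectToml(_)) => {
        /* fall back to building the source distribution */
    }
    Err(err) => return Err(err),
}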
/// Read an existing cached [`Metadata23`], if it exists.
async fn read_cached_metadata(cache_entry: &CacheEntry) -> Result<Option<Metadata23>, Error> {
match fs::read(&cache_entry.path()).await {


@@ -11,7 +11,7 @@ use distribution_types::{BuildableSource, DirectorySourceUrl, HashPolicy, Source
use pep508_rs::RequirementOrigin;
use pypi_types::Requirement;
use uv_configuration::ExtrasSpecification;
use uv_distribution::{DistributionDatabase, Reporter};
use uv_distribution::{DistributionDatabase, Reporter, RequiresDist};
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use uv_resolver::{InMemoryIndex, MetadataResponse};
@@ -85,73 +85,7 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
/// Infer the dependencies for a directory dependency.
async fn resolve_source_tree(&self, path: &Path) -> Result<SourceTreeResolution> {
// Convert to a buildable source.
let source_tree = fs_err::canonicalize(path).with_context(|| {
format!(
"Failed to canonicalize path to source tree: {}",
path.user_display()
)
})?;
let source_tree = source_tree.parent().ok_or_else(|| {
anyhow::anyhow!(
"The file `{}` appears to be a `setup.py` or `setup.cfg` file, which must be in a directory",
path.user_display()
)
})?;
let Ok(url) = Url::from_directory_path(source_tree) else {
return Err(anyhow::anyhow!("Failed to convert path to URL"));
};
let source = SourceUrl::Directory(DirectorySourceUrl {
url: &url,
path: Cow::Borrowed(source_tree),
editable: false,
});
// Determine the hash policy. Since we don't have a package name, we perform a
// manual match.
let hashes = match self.hasher {
HashStrategy::None => HashPolicy::None,
HashStrategy::Generate => HashPolicy::Generate,
HashStrategy::Validate { .. } => {
return Err(anyhow::anyhow!(
"Hash-checking is not supported for local directories: {}",
path.user_display()
));
}
};
// Fetch the metadata for the distribution.
let metadata = {
let id = VersionId::from_url(source.url());
if let Some(archive) =
self.index
.distributions()
.get(&id)
.as_deref()
.and_then(|response| {
if let MetadataResponse::Found(archive) = response {
Some(archive)
} else {
None
}
})
{
// If the metadata is already in the index, return it.
archive.metadata.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source);
let archive = self.database.build_wheel_metadata(&source, hashes).await?;
// Insert the metadata into the index.
self.index
.distributions()
.done(id, Arc::new(MetadataResponse::Found(archive.clone())));
archive.metadata
}
};
let metadata = self.resolve_requires_dist(path).await?;
let origin = RequirementOrigin::Project(path.to_path_buf(), metadata.name.clone());
@@ -208,4 +142,85 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
extras,
})
}
/// Resolve the [`RequiresDist`] metadata for a given source tree. Attempts to resolve the
/// requirements without building the distribution, even if the project contains (e.g.) a
/// dynamic version since, critically, we don't need to install the package itself; only its
/// dependencies.
async fn resolve_requires_dist(&self, path: &Path) -> Result<RequiresDist> {
// Convert to a buildable source.
let source_tree = fs_err::canonicalize(path).with_context(|| {
format!(
"Failed to canonicalize path to source tree: {}",
path.user_display()
)
})?;
let source_tree = source_tree.parent().ok_or_else(|| {
anyhow::anyhow!(
"The file `{}` appears to be a `pyproject.toml`, `setup.py`, or `setup.cfg` file, which must be in a directory",
path.user_display()
)
})?;
// If the path is a `pyproject.toml`, attempt to extract the requirements statically.
if let Ok(metadata) = self.database.requires_dist(source_tree).await {
return Ok(metadata);
}
let Ok(url) = Url::from_directory_path(source_tree) else {
return Err(anyhow::anyhow!("Failed to convert path to URL"));
};
let source = SourceUrl::Directory(DirectorySourceUrl {
url: &url,
path: Cow::Borrowed(source_tree),
editable: false,
});
// Determine the hash policy. Since we don't have a package name, we perform a
// manual match.
let hashes = match self.hasher {
HashStrategy::None => HashPolicy::None,
HashStrategy::Generate => HashPolicy::Generate,
HashStrategy::Validate { .. } => {
return Err(anyhow::anyhow!(
"Hash-checking is not supported for local directories: {}",
path.user_display()
));
}
};
// Fetch the metadata for the distribution.
let metadata = {
let id = VersionId::from_url(source.url());
if let Some(archive) =
self.index
.distributions()
.get(&id)
.as_deref()
.and_then(|response| {
if let MetadataResponse::Found(archive) = response {
Some(archive)
} else {
None
}
})
{
// If the metadata is already in the index, return it.
archive.metadata.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let source = BuildableSource::Url(source);
let archive = self.database.build_wheel_metadata(&source, hashes).await?;
// Insert the metadata into the index.
self.index
.distributions()
.done(id, Arc::new(MetadataResponse::Found(archive.clone())));
archive.metadata
}
};
Ok(RequiresDist::from(metadata))
}
}
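
Distilled, `resolve_requires_dist` is a two-step fallback; a minimal hedged sketch with the uv-specific details (hashing, the in-memory index, URL conversion) elided, and `Database`/`build_metadata` as hypothetical stand-ins:

// Sketch only: the shape of the strategy, not the real signatures.
async fn dependencies_for(database: &Database, path: &Path) -> Result<RequiresDist> {
    // 1) Try to read the requirements statically from `pyproject.toml`.
    if let Ok(requires_dist) = database.requires_dist(path).await {
        return Ok(requires_dist);
    }
    // 2) Otherwise, run the PEP 517 build to obtain full metadata, then keep
    //    only the requirements (discarding, e.g., the dynamically computed version).
    let metadata = build_metadata(database, path).await?;
    Ok(RequiresDist::from(metadata))
}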


@@ -205,6 +205,47 @@ dependencies = [
Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file. Despite the version being
/// dynamic, we shouldn't need to build the package, since the requirements are static.
#[test]
fn compile_pyproject_toml_dynamic_version() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dynamic = ["version"]
dependencies = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file with `--annotation-style=line`.
#[test]
fn compile_pyproject_toml_with_line_annotation() -> Result<()> {