Fall back to PEP 517 hooks for non-compliant PEP 621 metadata (#2662)

If you pass a `pyproject.toml` that uses Hatch's context formatting API,
we currently fail because the dependencies aren't valid under PEP 508.
This PR makes the static metadata parsing a little more relaxed, so that
we appropriately fall back to PEP 517 there.
This commit is contained in:
Charlie Marsh 2024-03-25 22:28:39 -04:00 committed by GitHub
parent 12846c2c85
commit 39769d82a0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 347 additions and 160 deletions

1
Cargo.lock generated
View file

@ -4748,6 +4748,7 @@ dependencies = [
"requirements-txt", "requirements-txt",
"rustc-hash", "rustc-hash",
"serde", "serde",
"thiserror",
"toml", "toml",
"tracing", "tracing",
"url", "url",

View file

@ -27,7 +27,7 @@ use tracing::{debug, info_span, instrument, Instrument};
use distribution_types::Resolution; use distribution_types::Resolution;
use pep440_rs::{Version, VersionSpecifiers}; use pep440_rs::{Version, VersionSpecifiers};
use pep508_rs::Requirement; use pep508_rs::{PackageName, Requirement};
use uv_fs::{PythonExt, Simplified}; use uv_fs::{PythonExt, Simplified};
use uv_interpreter::{Interpreter, PythonEnvironment}; use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_traits::{ use uv_traits::{
@ -72,7 +72,7 @@ pub enum Error {
IO(#[from] io::Error), IO(#[from] io::Error),
#[error("Invalid source distribution: {0}")] #[error("Invalid source distribution: {0}")]
InvalidSourceDist(String), InvalidSourceDist(String),
#[error("Invalid pyproject.toml")] #[error("Invalid `pyproject.toml`")]
InvalidPyprojectToml(#[from] toml::de::Error), InvalidPyprojectToml(#[from] toml::de::Error),
#[error("Editable installs with setup.py legacy builds are unsupported, please specify a build backend in pyproject.toml")] #[error("Editable installs with setup.py legacy builds are unsupported, please specify a build backend in pyproject.toml")]
EditableSetupPy, EditableSetupPy,
@ -208,7 +208,7 @@ pub struct PyProjectToml {
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
pub struct Project { pub struct Project {
/// The name of the project /// The name of the project
pub name: String, pub name: PackageName,
/// The version of the project as supported by PEP 440 /// The version of the project as supported by PEP 440
pub version: Option<Version>, pub version: Option<Version>,
/// The Python version requirements of the project /// The Python version requirements of the project

View file

@ -34,6 +34,7 @@ indexmap = { workspace = true }
pyproject-toml = { workspace = true } pyproject-toml = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true } toml = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
url = { workspace = true } url = { workspace = true }

View file

@ -4,6 +4,7 @@ pub use crate::sources::*;
pub use crate::specification::*; pub use crate::specification::*;
mod confirm; mod confirm;
mod pyproject;
mod resolver; mod resolver;
mod source_tree; mod source_tree;
mod sources; mod sources;

View file

@ -0,0 +1,185 @@
use indexmap::IndexMap;
use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
use pep508_rs::Requirement;
use uv_normalize::{ExtraName, PackageName};
use crate::ExtrasSpecification;
/// A `pyproject.toml` file, as specified in PEP 517/518 (build system) and PEP 621 (metadata).
///
/// Only the `[project]` table is modeled here; all other tables are ignored.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct PyProjectToml {
    /// The PEP 621 project metadata, if the `[project]` table is present.
    pub(crate) project: Option<Project>,
}
/// PEP 621 project metadata (the `[project]` table of a `pyproject.toml`).
///
/// Only the fields needed for static requirement extraction are modeled; any
/// other entries in the table are ignored during deserialization.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Project {
    /// The name of the project.
    pub(crate) name: PackageName,
    /// The project dependencies, as unparsed PEP 508 requirement strings.
    /// Parsing is deferred so that non-compliant metadata can fall back to PEP 517.
    pub(crate) dependencies: Option<Vec<String>>,
    /// The optional dependencies, keyed by extra name, as unparsed PEP 508 strings.
    pub(crate) optional_dependencies: Option<IndexMap<ExtraName, Vec<String>>>,
    /// Specifies which fields listed by PEP 621 were intentionally unspecified
    /// so another tool can/will provide such metadata dynamically.
    pub(crate) dynamic: Option<Vec<String>>,
}
/// The PEP 621 project metadata, with static requirements extracted in advance.
#[derive(Debug)]
pub(crate) struct Pep621Metadata {
    /// The name of the project.
    pub(crate) name: PackageName,
    /// The requirements extracted from the project: the base dependencies plus
    /// any requested extras, flattened.
    pub(crate) requirements: Vec<Requirement>,
    /// The extras used to collect requirements.
    pub(crate) used_extras: FxHashSet<ExtraName>,
}
/// An error that can occur while extracting static PEP 621 metadata.
#[derive(thiserror::Error, Debug)]
pub(crate) enum Pep621Error {
    /// A dependency string was not a valid PEP 508 requirement.
    #[error(transparent)]
    Pep508(#[from] pep508_rs::Pep508Error),
}
impl Pep621Metadata {
/// Extract the static [`Pep621Metadata`] from a [`Project`] and [`ExtrasSpecification`], if
/// possible.
///
/// If the project specifies dynamic dependencies, or if the project specifies dynamic optional
/// dependencies and the extras are requested, the requirements cannot be extracted.
///
/// Returns an error if the requirements are not valid PEP 508 requirements.
pub(crate) fn try_from(
project: Project,
extras: &ExtrasSpecification,
) -> Result<Option<Self>, Pep621Error> {
if let Some(dynamic) = project.dynamic.as_ref() {
// If the project specifies dynamic dependencies, we can't extract the requirements.
if dynamic.iter().any(|field| field == "dependencies") {
return Ok(None);
}
// If we requested extras, and the project specifies dynamic optional dependencies, we can't
// extract the requirements.
if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") {
return Ok(None);
}
}
let name = project.name;
// Parse out the project requirements.
let mut requirements = project
.dependencies
.unwrap_or_default()
.iter()
.map(String::as_str)
.map(Requirement::from_str)
.collect::<Result<Vec<_>, _>>()?;
// Include any optional dependencies specified in `extras`.
let mut used_extras = FxHashSet::default();
if !extras.is_empty() {
if let Some(optional_dependencies) = project.optional_dependencies {
// Parse out the optional dependencies.
let optional_dependencies = optional_dependencies
.into_iter()
.map(|(extra, requirements)| {
let requirements = requirements
.iter()
.map(String::as_str)
.map(Requirement::from_str)
.collect::<Result<Vec<_>, _>>()?;
Ok::<(ExtraName, Vec<Requirement>), Pep621Error>((extra, requirements))
})
.collect::<Result<IndexMap<_, _>, _>>()?;
// Include the optional dependencies if the extras are requested.
for (extra, optional_requirements) in &optional_dependencies {
if extras.contains(extra) {
used_extras.insert(extra.clone());
requirements.extend(flatten_extra(
&name,
optional_requirements,
&optional_dependencies,
));
}
}
}
}
Ok(Some(Self {
name,
requirements,
used_extras,
}))
}
}
/// Given an extra in a project that may contain references to the project
/// itself, flatten it into a list of requirements.
///
/// For example:
/// ```toml
/// [project]
/// name = "my-project"
/// version = "0.0.1"
/// dependencies = [
///     "tomli",
/// ]
///
/// [project.optional-dependencies]
/// test = [
///     "pep517",
/// ]
/// dev = [
///     "my-project[test]",
/// ]
/// ```
fn flatten_extra(
    project_name: &PackageName,
    requirements: &[Requirement],
    extras: &IndexMap<ExtraName, Vec<Requirement>>,
) -> Vec<Requirement> {
    fn inner(
        project_name: &PackageName,
        requirements: &[Requirement],
        extras: &IndexMap<ExtraName, Vec<Requirement>>,
        seen: &mut FxHashSet<ExtraName>,
    ) -> Vec<Requirement> {
        let mut flattened = Vec::with_capacity(requirements.len());
        for requirement in requirements {
            // A requirement on the project itself (e.g., `my-project[test]`) is a
            // reference to one or more of its own extras; expand those in place of
            // the requirement rather than keeping the self-reference.
            if requirement.name == *project_name {
                for extra in &requirement.extras {
                    // Avoid infinite recursion on mutually recursive extras.
                    if !seen.insert(extra.clone()) {
                        continue;
                    }

                    // Flatten the extra requirements. Look the group up directly,
                    // rather than scanning every group for an equal key, to avoid
                    // quadratic behavior when a project declares many extras.
                    // (An unknown extra name is silently skipped, as before.)
                    if let Some(extra_requirements) = extras.get(extra) {
                        flattened.extend(inner(project_name, extra_requirements, extras, seen));
                    }
                }
            } else {
                flattened.push(requirement.clone());
            }
        }
        flattened
    }

    inner(
        project_name,
        requirements,
        extras,
        &mut FxHashSet::default(),
    )
}

View file

@ -92,10 +92,7 @@ impl<'a> SourceTreeResolver<'a> {
SourceDistCachedBuilder::new(context, client) SourceDistCachedBuilder::new(context, client)
}; };
let metadata = builder let metadata = builder.download_and_build_metadata(&source).await?;
.download_and_build_metadata(&source)
.await
.context("Failed to build source distribution")?;
// Determine the appropriate requirements to return based on the extras. This involves // Determine the appropriate requirements to return based on the extras. This involves
// evaluating the `extras` expression in any markers, but preserving the remaining marker // evaluating the `extras` expression in any markers, but preserving the remaining marker

View file

@ -1,9 +1,6 @@
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use indexmap::IndexMap;
use pyproject_toml::Project;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use tracing::{instrument, Level}; use tracing::{instrument, Level};
@ -15,6 +12,7 @@ use uv_client::BaseClientBuilder;
use uv_fs::Simplified; use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName}; use uv_normalize::{ExtraName, PackageName};
use crate::pyproject::{Pep621Metadata, PyProjectToml};
use crate::{ExtrasSpecification, RequirementsSource}; use crate::{ExtrasSpecification, RequirementsSource};
#[derive(Debug, Default)] #[derive(Debug, Default)]
@ -120,22 +118,22 @@ impl RequirementsSpecification {
} }
RequirementsSource::PyprojectToml(path) => { RequirementsSource::PyprojectToml(path) => {
let contents = uv_fs::read_to_string(path).await?; let contents = uv_fs::read_to_string(path).await?;
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents) let pyproject = toml::from_str::<PyProjectToml>(&contents)
.with_context(|| format!("Failed to parse `{}`", path.user_display()))?; .with_context(|| format!("Failed to parse `{}`", path.user_display()))?;
// Attempt to read metadata from the `pyproject.toml` directly. // Attempt to read metadata from the `pyproject.toml` directly.
if let Some(project) = pyproject_toml //
// If we fail to extract the PEP 621 metadata, fall back to treating it as a source
// tree, as there are some cases where the `pyproject.toml` may not be a valid PEP
// 621 file, but might still resolve under PEP 517. (If the source tree doesn't
// resolve under PEP 517, we'll catch that later.)
//
// For example, Hatch's "Context formatting" API is not compliant with PEP 621, as
// it expects dynamic processing by the build backend for the static metadata
// fields. See: https://hatch.pypa.io/latest/config/context/
if let Some(project) = pyproject
.project .project
.map(|project| { .and_then(|project| Pep621Metadata::try_from(project, extras).ok().flatten())
StaticProject::try_from(project, extras).with_context(|| {
format!(
"Failed to extract requirements from `{}`",
path.user_display()
)
})
})
.transpose()?
.flatten()
{ {
Self { Self {
project: Some(project.name), project: Some(project.name),
@ -328,131 +326,3 @@ impl RequirementsSpecification {
.await .await
} }
} }
#[derive(Debug)]
pub struct StaticProject {
/// The name of the project.
pub name: PackageName,
/// The requirements extracted from the project.
pub requirements: Vec<Requirement>,
/// The extras used to collect requirements.
pub used_extras: FxHashSet<ExtraName>,
}
impl StaticProject {
pub fn try_from(project: Project, extras: &ExtrasSpecification) -> Result<Option<Self>> {
// Parse the project name.
let name =
PackageName::new(project.name).with_context(|| "Invalid `project.name`".to_string())?;
if let Some(dynamic) = project.dynamic.as_ref() {
// If the project specifies dynamic dependencies, we can't extract the requirements.
if dynamic.iter().any(|field| field == "dependencies") {
return Ok(None);
}
// If we requested extras, and the project specifies dynamic optional dependencies, we can't
// extract the requirements.
if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") {
return Ok(None);
}
}
let mut requirements = Vec::new();
let mut used_extras = FxHashSet::default();
// Include the default dependencies.
requirements.extend(project.dependencies.unwrap_or_default());
// Include any optional dependencies specified in `extras`.
if !extras.is_empty() {
if let Some(optional_dependencies) = project.optional_dependencies {
for (extra_name, optional_requirements) in &optional_dependencies {
let normalized_name = ExtraName::from_str(extra_name)
.with_context(|| format!("Invalid extra name `{extra_name}`"))?;
if extras.contains(&normalized_name) {
used_extras.insert(normalized_name);
requirements.extend(flatten_extra(
&name,
optional_requirements,
&optional_dependencies,
)?);
}
}
}
}
Ok(Some(Self {
name,
requirements,
used_extras,
}))
}
}
/// Given an extra in a project that may contain references to the project
/// itself, flatten it into a list of requirements.
///
/// For example:
/// ```toml
/// [project]
/// name = "my-project"
/// version = "0.0.1"
/// dependencies = [
/// "tomli",
/// ]
///
/// [project.optional-dependencies]
/// test = [
/// "pep517",
/// ]
/// dev = [
/// "my-project[test]",
/// ]
/// ```
fn flatten_extra(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<String, Vec<Requirement>>,
) -> Result<Vec<Requirement>> {
fn inner(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<String, Vec<Requirement>>,
seen: &mut FxHashSet<ExtraName>,
) -> Result<Vec<Requirement>> {
let mut flattened = Vec::with_capacity(requirements.len());
for requirement in requirements {
if requirement.name == *project_name {
for extra in &requirement.extras {
// Avoid infinite recursion on mutually recursive extras.
if !seen.insert(extra.clone()) {
continue;
}
// Flatten the extra requirements.
for (name, extra_requirements) in extras {
let normalized_name = ExtraName::from_str(name)?;
if normalized_name == *extra {
flattened.extend(inner(
project_name,
extra_requirements,
extras,
seen,
)?);
}
}
}
} else {
flattened.push(requirement.clone());
}
}
Ok(flattened)
}
inner(
project_name,
requirements,
extras,
&mut FxHashSet::default(),
)
}

View file

@ -612,6 +612,49 @@ setup(
Ok(()) Ok(())
} }
/// Resolve a `pyproject.toml` file with an invalid project name.
#[test]
fn compile_pyproject_toml_invalid_name() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "!project"
dependencies = [
"anyio==3.7.0",
]
"#,
)?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
uv_snapshot!(filters, context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `pyproject.toml`
Caused by: TOML parse error at line 5, column 8
|
5 | name = "!project"
| ^^^^^^^^^^
Not a valid package or extra name: "!project". Names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters.
"###
);
Ok(())
}
/// Request multiple extras that do not exist as a dependency group in a `pyproject.toml` file. /// Request multiple extras that do not exist as a dependency group in a `pyproject.toml` file.
#[test] #[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> { fn compile_pyproject_toml_extras_missing() -> Result<()> {
@ -5715,7 +5758,7 @@ requires-python = "<=3.8"
/// Build an editable package with Hatchling's {root:uri} feature. /// Build an editable package with Hatchling's {root:uri} feature.
#[test] #[test]
fn compile_root_uri() -> Result<()> { fn compile_root_uri_editable() -> Result<()> {
let context = TestContext::new("3.12"); let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in"); let requirements_in = context.temp_dir.child("requirements.in");
@ -5749,6 +5792,43 @@ fn compile_root_uri() -> Result<()> {
Ok(()) Ok(())
} }
/// Build a non-editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri_non_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${ROOT_PATH}\n${BLACK_PATH}")?;
// In addition to the standard filters, remove the temporary directory from the snapshot.
let filters: Vec<_> = [(r"file://.*/", "file://[TEMP_DIR]/")]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect();
let root_path = current_dir()?.join("../../scripts/packages/root_editable");
let black_path = current_dir()?.join("../../scripts/packages/black_editable");
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.env("ROOT_PATH", root_path.as_os_str())
.env("BLACK_PATH", black_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
black @ ${BLACK_PATH}
# via root-editable
root-editable @ ${ROOT_PATH}
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request a local wheel with a mismatched package name. /// Request a local wheel with a mismatched package name.
#[test] #[test]
fn requirement_wheel_name_mismatch() -> Result<()> { fn requirement_wheel_name_mismatch() -> Result<()> {

View file

@ -215,7 +215,16 @@ dependencies = ["flask==1.0.x"]
"#, "#,
)?; )?;
uv_snapshot!(command(&context) let filters = [
(r"file://.*", "[SOURCE_DIR]"),
(r#"File ".*[/\\]site-packages"#, "File \"[SOURCE_DIR]"),
("exit status", "exit code"),
]
.into_iter()
.chain(INSTA_FILTERS.to_vec())
.collect::<Vec<_>>();
uv_snapshot!(filters, command(&context)
.arg("-r") .arg("-r")
.arg("pyproject.toml"), @r###" .arg("pyproject.toml"), @r###"
success: false success: false
@ -223,15 +232,58 @@ dependencies = ["flask==1.0.x"]
----- stdout ----- ----- stdout -----
----- stderr ----- ----- stderr -----
error: Failed to parse `pyproject.toml` error: Failed to build: [SOURCE_DIR]
Caused by: TOML parse error at line 3, column 16 Caused by: Build backend failed to determine extra requires with `build_wheel()` with exit code: 1
| --- stdout:
3 | dependencies = ["flask==1.0.x"] configuration error: `project.dependencies[0]` must be pep508
| ^^^^^^^^^^^^^^^^ DESCRIPTION:
after parsing 1.0, found ".x" after it, which is not part of a valid version Project dependency specification according to PEP 508
flask==1.0.x
^^^^^^^
GIVEN VALUE:
"flask==1.0.x"
OFFENDING RULE: 'format'
DEFINITION:
{
"$id": "#/definitions/dependency",
"title": "Dependency",
"type": "string",
"format": "pep508"
}
--- stderr:
Traceback (most recent call last):
File "<string>", line 14, in <module>
File "[SOURCE_DIR]/setuptools/build_meta.py", line 325, in get_requires_for_build_wheel
return self._get_build_requires(config_settings, requirements=['wheel'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[SOURCE_DIR]/setuptools/build_meta.py", line 295, in _get_build_requires
self.run_setup()
File "[SOURCE_DIR]/setuptools/build_meta.py", line 487, in run_setup
super().run_setup(setup_script=setup_script)
File "[SOURCE_DIR]/setuptools/build_meta.py", line 311, in run_setup
exec(code, locals())
File "<string>", line 1, in <module>
File "[SOURCE_DIR]/setuptools/__init__.py", line 104, in setup
return distutils.core.setup(**attrs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[SOURCE_DIR]/setuptools/_distutils/core.py", line 159, in setup
dist.parse_config_files()
File "[SOURCE_DIR]/_virtualenv.py", line 22, in parse_config_files
result = old_parse_config_files(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[SOURCE_DIR]/setuptools/dist.py", line 631, in parse_config_files
pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
File "[SOURCE_DIR]/setuptools/config/pyprojecttoml.py", line 68, in apply_configuration
config = read_configuration(filepath, True, ignore_option_errors, dist)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[SOURCE_DIR]/setuptools/config/pyprojecttoml.py", line 129, in read_configuration
validate(subset, filepath)
File "[SOURCE_DIR]/setuptools/config/pyprojecttoml.py", line 57, in validate
raise ValueError(f"{error}/n{summary}") from None
ValueError: invalid pyproject.toml config: `project.dependencies[0]`.
configuration error: `project.dependencies[0]` must be pep508
---
"### "###
); );