Metadata transformation for the build backend (#7781)

konsti 2024-10-07 10:38:40 +02:00 committed by GitHub
parent 37b73230d3
commit 92538ada7c
12 changed files with 1630 additions and 72 deletions

Cargo.lock (generated)

@@ -3372,6 +3372,15 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "spdx"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47317bbaf63785b53861e1ae2d11b80d6b624211d42cb20efcd210ee6f8a14bc"
dependencies = [
"smallvec",
]
[[package]]
name = "spin"
version = "0.9.8"
@@ -4168,6 +4177,7 @@ dependencies = [
"unicode-width",
"url",
"uv-auth",
"uv-build-backend",
"uv-cache",
"uv-cache-info",
"uv-cache-key",
@@ -4257,6 +4267,35 @@ dependencies = [
"uv-types",
]
[[package]]
name = "uv-build-backend"
version = "0.1.0"
dependencies = [
"async_zip",
"fs-err",
"glob",
"indoc",
"insta",
"itertools 0.13.0",
"serde",
"spdx",
"tempfile",
"thiserror",
"tokio",
"tokio-util",
"toml",
"tracing",
"uv-distribution-filename",
"uv-fs",
"uv-normalize",
"uv-pep440",
"uv-pep508",
"uv-pubgrub",
"uv-pypi-types",
"uv-version",
"uv-warnings",
]
[[package]]
name = "uv-build-frontend"
version = "0.0.1"


@@ -22,6 +22,7 @@ license = "MIT OR Apache-2.0"
[workspace.dependencies]
uv-auth = { path = "crates/uv-auth" }
uv-build-backend = { path = "crates/uv-build-backend" }
uv-build-frontend = { path = "crates/uv-build-frontend" }
uv-cache = { path = "crates/uv-cache" }
uv-cache-info = { path = "crates/uv-cache-info" }
@@ -143,6 +144,7 @@ serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
sha2 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
spdx = { version = "0.10.6" }
syn = { version = "2.0.77" }
sys-info = { version = "0.9.1" }
target-lexicon = { version = "0.12.16" }


@@ -0,0 +1,41 @@
[package]
name = "uv-build-backend"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
[dependencies]
uv-distribution-filename = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-pubgrub = { workspace = true }
uv-pypi-types = { workspace = true }
uv-version = { workspace = true }
uv-warnings = { workspace = true }
async_zip = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
glob = { workspace = true }
itertools = { workspace = true }
serde = { workspace = true }
spdx = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
[lints]
workspace = true
[dev-dependencies]
indoc = { version = "2.0.5" }
insta = { version = "1.40.0" }
tempfile = { version = "3.12.0" }


@@ -0,0 +1,166 @@
mod metadata;
mod pep639_glob;
use crate::metadata::{PyProjectToml, ValidationError};
use crate::pep639_glob::Pep639GlobError;
use async_zip::base::write::ZipFileWriter;
use async_zip::error::ZipError;
use async_zip::{Compression, ZipEntryBuilder, ZipString};
use glob::{GlobError, PatternError};
use std::io;
use std::path::{Path, PathBuf};
use thiserror::Error;
use uv_distribution_filename::WheelFilename;
#[derive(Debug, Error)]
pub enum Error {
#[error(transparent)]
Io(#[from] io::Error),
#[error("Invalid pyproject.toml")]
Toml(#[from] toml::de::Error),
#[error("Invalid pyproject.toml")]
Validation(#[from] ValidationError),
#[error("Invalid `project.license-files` glob expression: `{0}`")]
Pep639Glob(String, #[source] Pep639GlobError),
#[error("The `project.license-files` entry is not a valid glob pattern: `{0}`")]
Pattern(String, #[source] PatternError),
/// [`GlobError`] is a wrapped io error.
#[error(transparent)]
Glob(#[from] GlobError),
#[error("Failed to write wheel zip archive")]
Zip(#[from] ZipError),
}
/// Allow dispatching between writing to a directory, writing to a zip archive, and writing to a `.tar.gz`.
trait AsyncDirectoryWrite: Sized {
async fn write_bytes(
&mut self,
directory: &Path,
filename: &str,
bytes: &[u8],
) -> Result<(), Error>;
#[allow(clippy::unused_async)] // https://github.com/rust-lang/rust-clippy/issues/11660
async fn close(self) -> Result<(), Error> {
Ok(())
}
}
/// Zip archive (wheel) writer.
struct AsyncZipWriter(ZipFileWriter<tokio_util::compat::Compat<fs_err::tokio::File>>);
impl AsyncDirectoryWrite for AsyncZipWriter {
async fn write_bytes(
&mut self,
directory: &Path,
filename: &str,
bytes: &[u8],
) -> Result<(), Error> {
self.0
.write_entry_whole(
ZipEntryBuilder::new(
ZipString::from(format!("{}/{}", directory.display(), filename)),
// TODO(konsti): Editables use stored.
Compression::Deflate,
)
// https://github.com/Majored/rs-async-zip/issues/150
.unix_permissions(0o644),
bytes,
)
.await?;
Ok(())
}
async fn close(self) -> Result<(), Error> {
self.0.close().await?;
Ok(())
}
}
struct AsyncFsWriter {
root: PathBuf,
}
/// File system writer.
impl AsyncDirectoryWrite for AsyncFsWriter {
async fn write_bytes(
&mut self,
directory: &Path,
filename: &str,
bytes: &[u8],
) -> Result<(), Error> {
fs_err::tokio::create_dir_all(self.root.join(directory)).await?;
fs_err::tokio::write(self.root.join(directory).join(filename), bytes).await?;
Ok(())
}
}
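
As an aside on the dispatch this trait enables: any target that accepts (directory, filename, bytes) triples can implement it. A hypothetical in-memory writer, sketched here only for illustration and not part of this commit, would look like:

struct InMemoryWriter {
    /// Collected `(path, contents)` pairs, e.g. for tests.
    files: Vec<(PathBuf, Vec<u8>)>,
}

impl AsyncDirectoryWrite for InMemoryWriter {
    async fn write_bytes(
        &mut self,
        directory: &Path,
        filename: &str,
        bytes: &[u8],
    ) -> Result<(), Error> {
        // Buffer the entry instead of touching the file system or a zip archive;
        // the default `close` implementation from the trait is sufficient here.
        self.files.push((directory.join(filename), bytes.to_vec()));
        Ok(())
    }
}
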
/// Build a wheel from the source tree and place it in the output directory.
pub async fn build(source_tree: &Path, wheel_dir: &Path) -> Result<WheelFilename, Error> {
let contents = fs_err::tokio::read_to_string(source_tree.join("pyproject.toml")).await?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
pyproject_toml.check_build_system();
let filename = WheelFilename {
name: pyproject_toml.name().clone(),
version: pyproject_toml.version().clone(),
build_tag: None,
python_tag: vec!["py3".to_string()],
abi_tag: vec!["none".to_string()],
platform_tag: vec!["any".to_string()],
};
// TODO(konsti): async-zip doesn't like a buffered writer
let wheel_file = fs_err::tokio::File::create(wheel_dir.join(filename.to_string())).await?;
let mut wheel_writer = AsyncZipWriter(ZipFileWriter::with_tokio(wheel_file));
write_dist_info(&mut wheel_writer, &pyproject_toml, source_tree).await?;
wheel_writer.close().await?;
Ok(filename)
}
/// Write the dist-info directory to the output directory without building the wheel.
pub async fn metadata(source_tree: &Path, metadata_directory: &Path) -> Result<String, Error> {
let contents = fs_err::tokio::read_to_string(source_tree.join("pyproject.toml")).await?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
pyproject_toml.check_build_system();
let mut wheel_writer = AsyncFsWriter {
root: metadata_directory.to_path_buf(),
};
write_dist_info(&mut wheel_writer, &pyproject_toml, source_tree).await?;
wheel_writer.close().await?;
Ok(format!(
"{}-{}.dist-info",
pyproject_toml.name().as_dist_info_name(),
pyproject_toml.version()
))
}
/// Add `METADATA` and `entry_points.txt` to the dist-info directory.
async fn write_dist_info(
writer: &mut impl AsyncDirectoryWrite,
pyproject_toml: &PyProjectToml,
root: &Path,
) -> Result<(), Error> {
let dist_info_dir = PathBuf::from(format!(
"{}-{}.dist-info",
pyproject_toml.name().as_dist_info_name(),
pyproject_toml.version()
));
let metadata = pyproject_toml
.to_metadata(root)
.await?
.core_metadata_format();
writer
.write_bytes(&dist_info_dir, "METADATA", metadata.as_bytes())
.await?;
let entrypoint = pyproject_toml.to_entry_points()?;
writer
.write_bytes(&dist_info_dir, "entry_points.txt", entrypoint.as_bytes())
.await?;
Ok(())
}
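
For orientation, a minimal sketch (not part of the commit) of driving the two public entry points from an async context, assuming a source tree containing a `pyproject.toml` and existing output directories:

// `build` writes the wheel into `dist/` and returns its filename.
let filename = uv_build_backend::build(Path::new("."), Path::new("dist")).await?;
println!("{filename}");
// `metadata` only materializes the `.dist-info` directory, which is what
// `prepare_metadata_for_build_wheel` needs, and returns the directory name.
let dist_info = uv_build_backend::metadata(Path::new("."), Path::new("metadata")).await?;
println!("{dist_info}");
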

File diff suppressed because it is too large


@@ -0,0 +1,136 @@
//! Implementation of PEP 639 cross-language restricted globs.
use glob::{Pattern, PatternError};
use thiserror::Error;
#[derive(Debug, Error)]
pub enum Pep639GlobError {
#[error(transparent)]
PatternError(#[from] PatternError),
#[error("The parent directory operator (`..`) at position {pos} is not allowed in license file globs")]
ParentDirectory { pos: usize },
#[error("Glob contains invalid character at position {pos}: `{invalid}`")]
InvalidCharacter { pos: usize, invalid: char },
#[error("Glob contains invalid character in range at position {pos}: `{invalid}`")]
InvalidCharacterRange { pos: usize, invalid: char },
}
/// Parse a PEP 639 `license-files` glob.
///
/// The syntax is more restricted than regular globbing in Python or Rust, for platform-independent
/// results. Since [`glob::Pattern`] is a superset of this format, we can use it after validating
/// that no unsupported features are in the string.
///
/// From [PEP 639](https://peps.python.org/pep-0639/#add-license-files-key):
///
/// > Its value is an array of strings which MUST contain valid glob patterns,
/// > as specified below:
/// >
/// > - Alphanumeric characters, underscores (`_`), hyphens (`-`) and dots (`.`)
/// > MUST be matched verbatim.
/// >
/// > - Special glob characters: `*`, `?`, `**` and character ranges: `[]`
/// > containing only the verbatim matched characters MUST be supported.
/// > Within `[...]`, the hyphen indicates a range (e.g. `a-z`).
/// > Hyphens at the start or end are matched literally.
/// >
/// > - Path delimiters MUST be the forward slash character (`/`).
/// > Patterns are relative to the directory containing `pyproject.toml`,
/// > therefore the leading slash character MUST NOT be used.
/// >
/// > - Parent directory indicators (`..`) MUST NOT be used.
/// >
/// > Any characters or character sequences not covered by this specification are
/// > invalid. Projects MUST NOT use such values.
/// > Tools consuming this field MAY reject invalid values with an error.
pub(crate) fn parse_pep639_glob(glob: &str) -> Result<Pattern, Pep639GlobError> {
let mut chars = glob.chars().enumerate().peekable();
// A `..` is only a parent directory indicator when it occurs at the start of the string or after
// a directory separator.
let mut start_or_slash = true;
while let Some((pos, c)) = chars.next() {
if c.is_alphanumeric() || matches!(c, '_' | '-' | '*' | '?') {
start_or_slash = false;
} else if c == '.' {
if start_or_slash && matches!(chars.peek(), Some((_, '.'))) {
return Err(Pep639GlobError::ParentDirectory { pos });
}
start_or_slash = false;
} else if c == '/' {
start_or_slash = true;
} else if c == '[' {
for (pos, c) in chars.by_ref() {
// TODO: https://discuss.python.org/t/pep-639-round-3-improving-license-clarity-with-better-package-metadata/53020/98
if c.is_alphanumeric() || matches!(c, '_' | '-' | '.') {
// Allowed.
} else if c == ']' {
break;
} else {
return Err(Pep639GlobError::InvalidCharacterRange { pos, invalid: c });
}
}
start_or_slash = false;
} else {
return Err(Pep639GlobError::InvalidCharacter { pos, invalid: c });
}
}
Ok(Pattern::new(glob)?)
}
#[cfg(test)]
mod tests {
use super::*;
use insta::assert_snapshot;
#[test]
fn test_error() {
let parse_err = |glob| parse_pep639_glob(glob).unwrap_err().to_string();
assert_snapshot!(
parse_err(".."),
@"The parent directory operator (`..`) at position 0 is not allowed in license file globs"
);
assert_snapshot!(
parse_err("licenses/.."),
@"The parent directory operator (`..`) at position 9 is not allowed in license file globs"
);
assert_snapshot!(
parse_err("licenses/LICEN!E.txt"),
@"Glob contains invalid character at position 14: `!`"
);
assert_snapshot!(
parse_err("licenses/LICEN[!C]E.txt"),
@"Glob contains invalid character in range at position 15: `!`"
);
assert_snapshot!(
parse_err("licenses/LICEN[C?]E.txt"),
@"Glob contains invalid character in range at position 16: `?`"
);
assert_snapshot!(parse_err("******"), @"Pattern syntax error near position 2: wildcards are either regular `*` or recursive `**`");
assert_snapshot!(
parse_err(r"licenses\eula.txt"),
@r"Glob contains invalid character at position 8: `\`"
);
}
#[test]
fn test_valid() {
let cases = [
"licenses/*.txt",
"licenses/**/*.txt",
"LICEN[CS]E.txt",
"LICEN?E.txt",
"[a-z].txt",
"[a-z._-].txt",
"*/**",
"LICENSE..txt",
"LICENSE_file-1.txt",
// (google translate)
"licenses/라이센스*.txt",
"licenses/ライセンス*.txt",
"licenses/执照*.txt",
];
for case in cases {
parse_pep639_glob(case).unwrap();
}
}
}
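
A validated pattern is still a regular `glob::Pattern`, so it can be matched against `/`-separated paths relative to the directory containing `pyproject.toml`. A brief sketch (illustrative, not part of the commit):

// Validation rejects unsupported syntax; matching then uses the glob crate's defaults.
let pattern = parse_pep639_glob("licenses/*.txt")?;
assert!(pattern.matches("licenses/MIT.txt"));
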


@@ -2,6 +2,7 @@
use crate::metadata::Headers;
use crate::MetadataError;
use std::fmt::Display;
use std::str;
use std::str::FromStr;
@@ -9,11 +10,12 @@ use std::str::FromStr;
/// <https://packaging.python.org/specifications/core-metadata/>.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct Metadata23 {
/// Version of the file format; legal values are `1.0`, `1.1`, `1.2`, `2.1`, `2.2` and `2.3`.
/// Version of the file format; legal values are `1.0`, `1.1`, `1.2`, `2.1`, `2.2`, `2.3` and
/// `2.4`.
pub metadata_version: String,
/// The name of the distribution.
pub name: String,
/// A string containing the distributions version number.
/// A string containing the distribution's version number.
pub version: String,
/// A Platform specification describing an operating system supported by the distribution
/// which is not listed in the “Operating System” Trove classifiers.
@@ -25,55 +27,77 @@ pub struct Metadata23 {
pub summary: Option<String>,
/// A longer description of the distribution that can run to several paragraphs.
pub description: Option<String>,
/// A string stating the markup syntax (if any) used in the distribution's description,
/// so that tools can intelligently render the description.
///
/// Known values: `text/plain`, `text/markdown` and `text/x-rst`.
pub description_content_type: Option<String>,
/// A list of additional keywords, separated by commas, to be used to
/// assist searching for the distribution in a larger catalog.
pub keywords: Option<String>,
/// A string containing the URL for the distributions home page.
/// A string containing the URL for the distribution's home page.
///
/// Deprecated by PEP 753.
pub home_page: Option<String>,
/// A string containing the URL from which this version of the distribution can be downloaded.
///
/// Deprecated by PEP 753.
pub download_url: Option<String>,
/// A string containing the authors name at a minimum; additional contact information may be provided.
/// A string containing the author's name at a minimum; additional contact information may be
/// provided.
pub author: Option<String>,
/// A string containing the authors e-mail address. It can contain a name and e-mail address in the legal forms for a RFC-822 `From:` header.
/// A string containing the author's e-mail address. It can contain a name and e-mail address in
/// the legal forms for an RFC-822 `From:` header.
pub author_email: Option<String>,
/// Text indicating the license covering the distribution where the license is not a selection from the `License` Trove classifiers or an SPDX license expression.
/// A string containing the maintainer's name at a minimum; additional contact information may
/// be provided.
///
/// Note that this field is intended for use when a project is being maintained by someone other
/// than the original author:
/// it should be omitted if it is identical to `author`.
pub maintainer: Option<String>,
/// A string containing the maintainer's e-mail address.
/// It can contain a name and e-mail address in the legal forms for a RFC-822 `From:` header.
///
/// Note that this field is intended for use when a project is being maintained by someone other
/// than the original author: it should be omitted if it is identical to `author_email`.
pub maintainer_email: Option<String>,
/// Text indicating the license covering the distribution where the license is not a selection
/// from the `License` Trove classifiers or an SPDX license expression.
pub license: Option<String>,
/// An SPDX expression indicating the license covering the distribution.
///
/// Introduced by PEP 639, requires metadata version 2.4.
pub license_expression: Option<String>,
/// Paths to files containing the text of the licenses covering the distribution.
///
/// Introduced by PEP 639, requires metadata version 2.4.
pub license_files: Vec<String>,
/// Each entry is a string giving a single classification value for the distribution.
pub classifiers: Vec<String>,
/// Each entry contains a string naming some other distutils project required by this distribution.
/// Each entry contains a string naming some other distutils project required by this
/// distribution.
pub requires_dist: Vec<String>,
/// Each entry contains a string naming a Distutils project which is contained within this distribution.
/// Each entry contains a string naming a Distutils project which is contained within this
/// distribution.
pub provides_dist: Vec<String>,
/// Each entry contains a string describing a distutils projects distribution which this distribution renders obsolete,
/// Each entry contains a string describing a distutils project's distribution which this
/// distribution renders obsolete,
/// meaning that the two projects should not be installed at the same time.
pub obsoletes_dist: Vec<String>,
/// A string containing the maintainers name at a minimum; additional contact information may be provided.
///
/// Note that this field is intended for use when a project is being maintained by someone other than the original author:
/// it should be omitted if it is identical to `author`.
pub maintainer: Option<String>,
/// A string containing the maintainers e-mail address.
/// It can contain a name and e-mail address in the legal forms for a RFC-822 `From:` header.
///
/// Note that this field is intended for use when a project is being maintained by someone other than the original author:
/// it should be omitted if it is identical to `author_email`.
pub maintainer_email: Option<String>,
/// This field specifies the Python version(s) that the distribution is guaranteed to be compatible with.
/// This field specifies the Python version(s) that the distribution is guaranteed to be
/// compatible with.
pub requires_python: Option<String>,
/// Each entry contains a string describing some dependency in the system that the distribution is to be used.
/// Each entry contains a string describing some dependency in the system that the distribution
/// is to be used.
pub requires_external: Vec<String>,
/// A string containing a browsable URL for the project and a label for it, separated by a comma.
/// A string containing a browsable URL for the project and a label for it, separated by a
/// comma.
pub project_urls: Vec<String>,
/// A string containing the name of an optional feature. Must be a valid Python identifier.
/// May be used to make a dependency conditional on whether the optional feature has been requested.
/// May be used to make a dependency conditional on whether the optional feature has been
/// requested.
pub provides_extras: Vec<String>,
/// A string stating the markup syntax (if any) used in the distributions description,
/// so that tools can intelligently render the description.
pub description_content_type: Option<String>,
/// A string containing the name of another core metadata field.
pub dynamic: Vec<String>,
}
@@ -130,11 +154,14 @@ impl Metadata23 {
supported_platforms,
summary,
description,
description_content_type,
keywords,
home_page,
download_url,
author,
author_email,
maintainer,
maintainer_email,
license,
license_expression,
license_files,
@@ -142,16 +169,101 @@ impl Metadata23 {
requires_dist,
provides_dist,
obsoletes_dist,
maintainer,
maintainer_email,
requires_python,
requires_external,
project_urls,
provides_extras,
description_content_type,
dynamic,
})
}
/// Convert to the pseudo-email format used by Python's METADATA.
///
/// > The standard file format for metadata (including in wheels and installed projects) is
/// > based on the format of email headers. However, email formats have been revised several
/// > times, and exactly which email RFC applies to packaging metadata is not specified. In the
/// > absence of a precise definition, the practical standard is set by what the standard
/// > library `email.parser` module can parse using the `compat32` policy.
/// - <https://packaging.python.org/en/latest/specifications/core-metadata/#core-metadata-specifications>
///
/// # Example
///
/// ```text
/// Metadata-Version: 2.3
/// Name: hello-world
/// Version: 0.1.0
/// License: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
/// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A [...]
/// ```
pub fn core_metadata_format(&self) -> String {
fn write_str(writer: &mut String, key: &str, value: impl Display) {
let value = value.to_string();
let mut lines = value.lines();
if let Some(line) = lines.next() {
writer.push_str(&format!("{key}: {line}\n"));
} else {
// The value is an empty string
writer.push_str(&format!("{key}: \n"));
}
for line in lines {
// Python implementations vary
// https://github.com/pypa/pyproject-metadata/pull/150/files#diff-7d938dbc255a08c2cfab1b4f1f8d1f6519c9312dd0a39d7793fa778474f1fbd1L135-R141
writer.push_str(&format!("{}{}\n", " ".repeat(key.len() + 2), line));
}
}
fn write_opt_str(writer: &mut String, key: &str, value: &Option<impl Display>) {
if let Some(value) = value {
write_str(writer, key, value);
}
}
fn write_all(
writer: &mut String,
key: &str,
values: impl IntoIterator<Item = impl Display>,
) {
for value in values {
write_str(writer, key, value);
}
}
let mut writer = String::new();
write_str(&mut writer, "Metadata-Version", &self.metadata_version);
write_str(&mut writer, "Name", &self.name);
write_str(&mut writer, "Version", &self.version);
write_all(&mut writer, "Platform", &self.platforms);
write_all(&mut writer, "Supported-Platform", &self.supported_platforms);
write_all(&mut writer, "Summary", &self.summary);
write_opt_str(&mut writer, "Keywords", &self.keywords);
write_opt_str(&mut writer, "Home-Page", &self.home_page);
write_opt_str(&mut writer, "Download-URL", &self.download_url);
write_opt_str(&mut writer, "Author", &self.author);
write_opt_str(&mut writer, "Author-email", &self.author_email);
write_opt_str(&mut writer, "License", &self.license);
write_opt_str(&mut writer, "License-Expression", &self.license_expression);
write_all(&mut writer, "License-File", &self.license_files);
write_all(&mut writer, "Classifier", &self.classifiers);
write_all(&mut writer, "Requires-Dist", &self.requires_dist);
write_all(&mut writer, "Provides-Dist", &self.provides_dist);
write_all(&mut writer, "Obsoletes-Dist", &self.obsoletes_dist);
write_opt_str(&mut writer, "Maintainer", &self.maintainer);
write_opt_str(&mut writer, "Maintainer-email", &self.maintainer_email);
write_opt_str(&mut writer, "Requires-Python", &self.requires_python);
write_all(&mut writer, "Requires-External", &self.requires_external);
write_all(&mut writer, "Project-URL", &self.project_urls);
write_all(&mut writer, "Provides-Extra", &self.provides_extras);
write_opt_str(
&mut writer,
"Description-Content-Type",
&self.description_content_type,
);
write_all(&mut writer, "Dynamic", &self.dynamic);
if let Some(description) = &self.description {
writer.push('\n');
writer.push_str(description);
}
writer
}
}
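
To make the PEP 639 additions concrete, a small illustrative sketch (field values are assumptions for the example, not taken from the commit) of rendering `License-Expression` and `License-File` headers through `core_metadata_format`:

let metadata = Metadata23 {
    metadata_version: "2.4".to_string(),
    name: "hello-world".to_string(),
    version: "0.1.0".to_string(),
    license_expression: Some("MIT".to_string()),
    license_files: vec!["LICENSE".to_string()],
    ..Metadata23::default()
};
// Emits `Metadata-Version: 2.4`, `License-Expression: MIT` and
// `License-File: LICENSE` among the header lines.
let rendered = metadata.core_metadata_format();
assert!(rendered.contains("License-Expression: MIT"));
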
impl FromStr for Metadata23 {


@@ -15,6 +15,7 @@ workspace = true
[dependencies]
uv-auth = { workspace = true }
uv-build-backend = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-cache-key = { workspace = true }


@@ -1,41 +1,58 @@
#![allow(clippy::print_stdout)]
use crate::commands::ExitStatus;
use anyhow::Result;
use std::env;
use std::path::Path;
pub(crate) fn build_sdist(_sdist_directory: &Path) -> Result<ExitStatus> {
#[expect(clippy::unused_async)]
pub(crate) async fn build_sdist(_sdist_directory: &Path) -> Result<ExitStatus> {
todo!()
}
pub(crate) fn build_wheel(
pub(crate) async fn build_wheel(
wheel_directory: &Path,
_metadata_directory: Option<&Path>,
) -> Result<ExitStatus> {
let filename = uv_build_backend::build(&env::current_dir()?, wheel_directory).await?;
println!("{filename}");
Ok(ExitStatus::Success)
}
#[expect(clippy::unused_async)]
pub(crate) async fn build_editable(
_wheel_directory: &Path,
_metadata_directory: Option<&Path>,
) -> Result<ExitStatus> {
todo!()
}
pub(crate) fn build_editable(
#[expect(clippy::unused_async)]
pub(crate) async fn get_requires_for_build_sdist() -> Result<ExitStatus> {
todo!()
}
#[expect(clippy::unused_async)]
pub(crate) async fn get_requires_for_build_wheel() -> Result<ExitStatus> {
todo!()
}
pub(crate) async fn prepare_metadata_for_build_wheel(
metadata_directory: &Path,
) -> Result<ExitStatus> {
let filename = uv_build_backend::metadata(&env::current_dir()?, metadata_directory).await?;
println!("{filename}");
Ok(ExitStatus::Success)
}
#[expect(clippy::unused_async)]
pub(crate) async fn get_requires_for_build_editable() -> Result<ExitStatus> {
todo!()
}
#[expect(clippy::unused_async)]
pub(crate) async fn prepare_metadata_for_build_editable(
_wheel_directory: &Path,
_metadata_directory: Option<&Path>,
) -> Result<ExitStatus> {
todo!()
}
pub(crate) fn get_requires_for_build_sdist() -> Result<ExitStatus> {
todo!()
}
pub(crate) fn get_requires_for_build_wheel() -> Result<ExitStatus> {
todo!()
}
pub(crate) fn prepare_metadata_for_build_wheel(_wheel_directory: &Path) -> Result<ExitStatus> {
todo!()
}
pub(crate) fn get_requires_for_build_editable() -> Result<ExitStatus> {
todo!()
}
pub(crate) fn prepare_metadata_for_build_editable(_wheel_directory: &Path) -> Result<ExitStatus> {
todo!()
}


@@ -38,7 +38,7 @@ use crate::settings::{ResolverSettings, ResolverSettingsRef};
/// Build source distributions and wheels.
#[allow(clippy::fn_params_excessive_bools)]
pub(crate) async fn build(
pub(crate) async fn build_frontend(
project_dir: &Path,
src: Option<PathBuf>,
package: Option<PackageName>,


@@ -7,7 +7,7 @@ use std::path::Path;
use std::time::Duration;
use std::{fmt::Display, fmt::Write, process::ExitCode};
pub(crate) use build::build;
pub(crate) use build_frontend::build_frontend;
pub(crate) use cache_clean::cache_clean;
pub(crate) use cache_dir::cache_dir;
pub(crate) use cache_prune::cache_prune;
@@ -60,8 +60,8 @@ pub(crate) use version::version;
use crate::printer::Printer;
mod build;
pub(crate) mod build_backend;
mod build_frontend;
mod cache_clean;
mod cache_dir;
mod cache_prune;


@@ -685,7 +685,7 @@ async fn run(cli: Cli) -> Result<ExitStatus> {
.map(RequirementsSource::from_constraints_txt)
.collect::<Vec<_>>();
commands::build(
commands::build_frontend(
&project_dir,
args.src,
args.package,
@@ -1115,36 +1115,42 @@
}
Commands::BuildBackend { command } => match command {
BuildBackendCommand::BuildSdist { sdist_directory } => {
commands::build_backend::build_sdist(&sdist_directory)
commands::build_backend::build_sdist(&sdist_directory).await
}
BuildBackendCommand::BuildWheel {
wheel_directory,
metadata_directory,
} => commands::build_backend::build_wheel(
} => {
commands::build_backend::build_wheel(
&wheel_directory,
metadata_directory.as_deref(),
),
)
.await
}
BuildBackendCommand::BuildEditable {
wheel_directory,
metadata_directory,
} => commands::build_backend::build_editable(
} => {
commands::build_backend::build_editable(
&wheel_directory,
metadata_directory.as_deref(),
),
)
.await
}
BuildBackendCommand::GetRequiresForBuildSdist => {
commands::build_backend::get_requires_for_build_sdist()
commands::build_backend::get_requires_for_build_sdist().await
}
BuildBackendCommand::GetRequiresForBuildWheel => {
commands::build_backend::get_requires_for_build_wheel()
commands::build_backend::get_requires_for_build_wheel().await
}
BuildBackendCommand::PrepareMetadataForBuildWheel { wheel_directory } => {
commands::build_backend::prepare_metadata_for_build_wheel(&wheel_directory)
commands::build_backend::prepare_metadata_for_build_wheel(&wheel_directory).await
}
BuildBackendCommand::GetRequiresForBuildEditable => {
commands::build_backend::get_requires_for_build_editable()
commands::build_backend::get_requires_for_build_editable().await
}
BuildBackendCommand::PrepareMetadataForBuildEditable { wheel_directory } => {
commands::build_backend::prepare_metadata_for_build_editable(&wheel_directory)
commands::build_backend::prepare_metadata_for_build_editable(&wheel_directory).await
}
},
}