diff --git a/Cargo.lock b/Cargo.lock index 90ad731ff..77077cdaa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -399,6 +399,7 @@ dependencies = [ "once_cell", "pep508_rs", "platform-tags", + "pypi-types", "tokio", "uv-cache", "uv-client", @@ -1103,7 +1104,6 @@ dependencies = [ "cache-key", "distribution-filename", "fs-err", - "indexmap", "itertools 0.13.0", "once_cell", "pep440_rs", @@ -4567,10 +4567,10 @@ version = "0.0.1" dependencies = [ "anyhow", "clap", - "distribution-types", "either", "pep508_rs", "platform-tags", + "pypi-types", "rustc-hash", "schemars", "serde", @@ -4615,10 +4615,10 @@ dependencies = [ "uv-client", "uv-configuration", "uv-dispatch", + "uv-distribution", "uv-fs", "uv-installer", "uv-interpreter", - "uv-requirements", "uv-resolver", "uv-types", "uv-workspace", @@ -4634,6 +4634,7 @@ dependencies = [ "futures", "install-wheel-rs", "itertools 0.13.0", + "pypi-types", "rustc-hash", "tracing", "uv-build", @@ -4657,22 +4658,28 @@ dependencies = [ "distribution-types", "fs-err", "futures", + "glob", + "insta", "install-wheel-rs", "nanoid", "once_cell", + "path-absolutize", "pep440_rs", "pep508_rs", "platform-tags", "pypi-types", + "regex", "reqwest", "reqwest-middleware", "rmp-serde", "rustc-hash", + "schemars", "serde", "tempfile", "thiserror", "tokio", "tokio-util", + "toml", "tracing", "url", "uv-cache", @@ -4683,6 +4690,7 @@ dependencies = [ "uv-git", "uv-normalize", "uv-types", + "uv-warnings", "zip", ] @@ -4848,22 +4856,12 @@ dependencies = [ "distribution-types", "fs-err", "futures", - "glob", - "indexmap", - "indoc", - "insta", - "path-absolutize", - "pep440_rs", "pep508_rs", "pypi-types", - "regex", "requirements-txt", "rustc-hash", - "same-file", - "schemars", "serde", "thiserror", - "tokio", "toml", "tracing", "url", diff --git a/crates/bench/Cargo.toml b/crates/bench/Cargo.toml index 3ab59e30f..5d49a9bbd 100644 --- a/crates/bench/Cargo.toml +++ b/crates/bench/Cargo.toml @@ -30,17 +30,18 @@ harness = false [dependencies] distribution-filename = { workspace = true } distribution-types = { workspace = true } +install-wheel-rs = { workspace = true } pep508_rs = { workspace = true } platform-tags = { workspace = true } +pypi-types = { workspace = true } uv-cache = { workspace = true } uv-client = { workspace = true } -uv-dispatch = { workspace = true } uv-configuration = { workspace = true } +uv-dispatch = { workspace = true } uv-distribution = { workspace = true } uv-interpreter = { workspace = true } uv-resolver = { workspace = true } uv-types = { workspace = true } -install-wheel-rs = { workspace = true } anyhow = { workspace = true } codspeed-criterion-compat = { version = "2.6.0", default-features = false, optional = true } diff --git a/crates/bench/benches/uv.rs b/crates/bench/benches/uv.rs index ab346ad1d..391007118 100644 --- a/crates/bench/benches/uv.rs +++ b/crates/bench/benches/uv.rs @@ -2,7 +2,7 @@ use std::str::FromStr; use bench::criterion::black_box; use bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion}; -use distribution_types::Requirement; +use pypi_types::Requirement; use uv_cache::Cache; use uv_client::RegistryClientBuilder; use uv_interpreter::PythonEnvironment; @@ -80,7 +80,9 @@ mod resolver { use platform_tags::{Arch, Os, Platform, Tags}; use uv_cache::Cache; use uv_client::RegistryClient; - use uv_configuration::{Concurrency, ConfigSettings, NoBinary, NoBuild, SetupPyStrategy}; + use uv_configuration::{ + Concurrency, ConfigSettings, NoBinary, NoBuild, PreviewMode, SetupPyStrategy, + }; use 
uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; use uv_interpreter::PythonEnvironment; @@ -149,6 +151,7 @@ mod resolver { &NoBuild::None, &NoBinary::None, concurrency, + PreviewMode::Disabled, ); let resolver = Resolver::new( @@ -162,7 +165,12 @@ mod resolver { &hashes, &build_context, installed_packages, - DistributionDatabase::new(client, &build_context, concurrency.downloads), + DistributionDatabase::new( + client, + &build_context, + concurrency.downloads, + PreviewMode::Disabled, + ), )?; Ok(resolver.resolve().await?) diff --git a/crates/distribution-types/Cargo.toml b/crates/distribution-types/Cargo.toml index 1538b1032..d2b8cfada 100644 --- a/crates/distribution-types/Cargo.toml +++ b/crates/distribution-types/Cargo.toml @@ -25,7 +25,6 @@ uv-normalize = { workspace = true } anyhow = { workspace = true } fs-err = { workspace = true } -indexmap = { workspace = true } itertools = { workspace = true } once_cell = { workspace = true } rkyv = { workspace = true } diff --git a/crates/distribution-types/src/lib.rs b/crates/distribution-types/src/lib.rs index 4e10a38c8..e259370fd 100644 --- a/crates/distribution-types/src/lib.rs +++ b/crates/distribution-types/src/lib.rs @@ -58,7 +58,6 @@ pub use crate::id::*; pub use crate::index_url::*; pub use crate::installed::*; pub use crate::prioritized_distribution::*; -pub use crate::requirement::*; pub use crate::resolution::*; pub use crate::resolved::*; pub use crate::specified_requirement::*; @@ -76,7 +75,6 @@ mod id; mod index_url; mod installed; mod prioritized_distribution; -mod requirement; mod resolution; mod resolved; mod specified_requirement; diff --git a/crates/distribution-types/src/resolution.rs b/crates/distribution-types/src/resolution.rs index 8f5a1a222..d7cc5588b 100644 --- a/crates/distribution-types/src/resolution.rs +++ b/crates/distribution-types/src/resolution.rs @@ -1,10 +1,9 @@ +use pypi_types::{Requirement, RequirementSource}; use std::collections::BTreeMap; use uv_normalize::{ExtraName, PackageName}; -use crate::{ - BuiltDist, Diagnostic, Dist, Name, Requirement, RequirementSource, ResolvedDist, SourceDist, -}; +use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist}; /// A set of packages pinned at specific versions. #[derive(Debug, Default, Clone)] diff --git a/crates/distribution-types/src/specified_requirement.rs b/crates/distribution-types/src/specified_requirement.rs index 8fed17d62..b12b0a0f6 100644 --- a/crates/distribution-types/src/specified_requirement.rs +++ b/crates/distribution-types/src/specified_requirement.rs @@ -2,9 +2,10 @@ use std::borrow::Cow; use std::fmt::{Display, Formatter}; use pep508_rs::{MarkerEnvironment, UnnamedRequirement}; +use pypi_types::{Requirement, RequirementSource}; use uv_normalize::ExtraName; -use crate::{Requirement, RequirementSource, VerbatimParsedUrl}; +use crate::VerbatimParsedUrl; /// An [`UnresolvedRequirement`] with additional metadata from `requirements.txt`, currently only /// hashes but in the future also editable and similar information. 
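The moves above are the heart of this diff: the uv-flavored `Requirement` and `RequirementSource` types now live in `pypi-types` rather than `distribution-types`. As an illustration (not part of the diff), a call site after the move might look like this minimal sketch, which assumes only the `pypi_types::Requirement::from` conversion that the metadata-lowering code later in this diff applies to each `requires_dist` entry:

```rust
use std::str::FromStr;

use pypi_types::{Requirement, VerbatimParsedUrl};

/// Hypothetical helper: parse a PEP 508 string, then lift it into the richer
/// uv `Requirement` representation.
fn to_uv_requirement(
    spec: &str,
) -> Result<Requirement, pep508_rs::Pep508Error<VerbatimParsedUrl>> {
    let parsed = pep508_rs::Requirement::<VerbatimParsedUrl>::from_str(spec)?;
    Ok(Requirement::from(parsed))
}
```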
diff --git a/crates/pep508-rs/src/marker.rs b/crates/pep508-rs/src/marker.rs index afe6db829..8ffa28015 100644 --- a/crates/pep508-rs/src/marker.rs +++ b/crates/pep508-rs/src/marker.rs @@ -1541,6 +1541,25 @@ pub enum MarkerTree { Or(Vec<MarkerTree>), } +impl<'de> Deserialize<'de> for MarkerTree { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + FromStr::from_str(&s).map_err(de::Error::custom) + } +} + +impl Serialize for MarkerTree { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + impl FromStr for MarkerTree { type Err = Pep508Error; diff --git a/crates/pypi-types/src/lib.rs b/crates/pypi-types/src/lib.rs index 086437ba5..73efeba94 100644 --- a/crates/pypi-types/src/lib.rs +++ b/crates/pypi-types/src/lib.rs @@ -3,6 +3,7 @@ pub use direct_url::*; pub use lenient_requirement::*; pub use metadata::*; pub use parsed_url::*; +pub use requirement::*; pub use scheme::*; pub use simple_json::*; @@ -11,5 +12,6 @@ mod direct_url; mod lenient_requirement; mod metadata; mod parsed_url; +mod requirement; mod scheme; mod simple_json; diff --git a/crates/distribution-types/src/requirement.rs b/crates/pypi-types/src/requirement.rs similarity index 99% rename from crates/distribution-types/src/requirement.rs rename to crates/pypi-types/src/requirement.rs index 318a35045..4f235d702 100644 --- a/crates/distribution-types/src/requirement.rs +++ b/crates/pypi-types/src/requirement.rs @@ -6,10 +6,11 @@ use url::Url; use pep440_rs::VersionSpecifiers; use pep508_rs::{MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl}; -use pypi_types::{ParsedUrl, VerbatimParsedUrl}; use uv_git::{GitReference, GitSha}; use uv_normalize::{ExtraName, PackageName}; +use crate::{ParsedUrl, VerbatimParsedUrl}; + /// The requirements of a distribution, an extension over PEP 508's requirements.
#[derive(Debug, Clone, Eq, PartialEq)] pub struct Requirements { diff --git a/crates/requirements-txt/src/lib.rs b/crates/requirements-txt/src/lib.rs index 68166c033..a7ed8d5cd 100644 --- a/crates/requirements-txt/src/lib.rs +++ b/crates/requirements-txt/src/lib.rs @@ -44,12 +44,12 @@ use tracing::instrument; use unscanny::{Pattern, Scanner}; use url::Url; -use distribution_types::{Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification}; +use distribution_types::{UnresolvedRequirement, UnresolvedRequirementSpecification}; use pep508_rs::{ expand_env_vars, split_scheme, strip_host, Extras, MarkerTree, Pep508Error, Pep508ErrorSource, RequirementOrigin, Scheme, UnnamedRequirement, VerbatimUrl, }; -use pypi_types::{ParsedPathUrl, ParsedUrl, VerbatimParsedUrl}; +use pypi_types::{ParsedPathUrl, ParsedUrl, Requirement, VerbatimParsedUrl}; #[cfg(feature = "http")] use uv_client::BaseClient; use uv_client::BaseClientBuilder; diff --git a/crates/uv-build/src/lib.rs b/crates/uv-build/src/lib.rs index 8057eda86..604bc5c8e 100644 --- a/crates/uv-build/src/lib.rs +++ b/crates/uv-build/src/lib.rs @@ -25,10 +25,10 @@ use tokio::process::Command; use tokio::sync::{Mutex, Semaphore}; use tracing::{debug, info_span, instrument, Instrument}; -use distribution_types::{Requirement, Resolution}; +use distribution_types::Resolution; use pep440_rs::Version; use pep508_rs::PackageName; -use pypi_types::VerbatimParsedUrl; +use pypi_types::{Requirement, VerbatimParsedUrl}; use uv_configuration::{BuildKind, ConfigSettings, SetupPyStrategy}; use uv_fs::{PythonExt, Simplified}; use uv_interpreter::{Interpreter, PythonEnvironment}; diff --git a/crates/uv-configuration/Cargo.toml b/crates/uv-configuration/Cargo.toml index b9a9b8769..e1e3bdc6c 100644 --- a/crates/uv-configuration/Cargo.toml +++ b/crates/uv-configuration/Cargo.toml @@ -13,9 +13,9 @@ license = { workspace = true } workspace = true [dependencies] -distribution-types = { workspace = true } pep508_rs = { workspace = true } platform-tags = { workspace = true } +pypi-types = { workspace = true } uv-auth = { workspace = true } uv-normalize = { workspace = true } diff --git a/crates/uv-configuration/src/constraints.rs b/crates/uv-configuration/src/constraints.rs index f1d7a9957..8c390d204 100644 --- a/crates/uv-configuration/src/constraints.rs +++ b/crates/uv-configuration/src/constraints.rs @@ -1,8 +1,8 @@ use std::hash::BuildHasherDefault; -use distribution_types::Requirement; use rustc_hash::FxHashMap; +use pypi_types::Requirement; use uv_normalize::PackageName; /// A set of constraints for a set of requirements. diff --git a/crates/uv-configuration/src/overrides.rs b/crates/uv-configuration/src/overrides.rs index 37a5f0531..d91aa1e76 100644 --- a/crates/uv-configuration/src/overrides.rs +++ b/crates/uv-configuration/src/overrides.rs @@ -1,9 +1,9 @@ use std::hash::BuildHasherDefault; use either::Either; +use pypi_types::Requirement; use rustc_hash::FxHashMap; -use distribution_types::Requirement; use uv_normalize::PackageName; /// A set of overrides for a set of requirements. 
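For context on the `MarkerTree` Serialize/Deserialize impls added earlier in this diff: markers now round-trip through their PEP 508 string form, which is what lets them appear in serialized caches and in `Requirement` itself. A minimal sketch (serde_json is assumed here purely for illustration; the impls themselves only require serde):

```rust
use std::str::FromStr;

use pep508_rs::MarkerTree;

fn marker_roundtrip() -> Result<(), Box<dyn std::error::Error>> {
    let marker = MarkerTree::from_str("python_version >= '3.8' and sys_platform == 'linux'")?;
    // Serializes as a single string, e.g. "python_version >= '3.8' and sys_platform == 'linux'".
    let json = serde_json::to_string(&marker)?;
    // Deserializes back through `FromStr`.
    let back: MarkerTree = serde_json::from_str(&json)?;
    assert_eq!(marker.to_string(), back.to_string());
    Ok(())
}
```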
diff --git a/crates/uv-dev/Cargo.toml b/crates/uv-dev/Cargo.toml index 76df63b19..c78e8acfe 100644 --- a/crates/uv-dev/Cargo.toml +++ b/crates/uv-dev/Cargo.toml @@ -25,11 +25,11 @@ uv-build = { workspace = true } uv-cache = { workspace = true, features = ["clap"] } uv-client = { workspace = true } uv-configuration = { workspace = true } +uv-distribution = { workspace = true, features = ["schemars"] } uv-dispatch = { workspace = true } uv-fs = { workspace = true } uv-installer = { workspace = true } uv-interpreter = { workspace = true } -uv-requirements = { workspace = true, features = ["schemars"] } uv-resolver = { workspace = true } uv-types = { workspace = true } uv-workspace = { workspace = true, features = ["schemars"] } diff --git a/crates/uv-dev/src/build.rs b/crates/uv-dev/src/build.rs index 965f3f056..503c030cf 100644 --- a/crates/uv-dev/src/build.rs +++ b/crates/uv-dev/src/build.rs @@ -11,7 +11,7 @@ use uv_build::{SourceBuild, SourceBuildContext}; use uv_cache::{Cache, CacheArgs}; use uv_client::RegistryClientBuilder; use uv_configuration::{ - BuildKind, Concurrency, ConfigSettings, NoBinary, NoBuild, SetupPyStrategy, + BuildKind, Concurrency, ConfigSettings, NoBinary, NoBuild, PreviewMode, SetupPyStrategy, }; use uv_dispatch::BuildDispatch; use uv_interpreter::PythonEnvironment; @@ -80,6 +80,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> { &NoBuild::None, &NoBinary::None, concurrency, + PreviewMode::Enabled, ); let builder = SourceBuild::setup( diff --git a/crates/uv-dev/src/generate_json_schema.rs b/crates/uv-dev/src/generate_json_schema.rs index 494ca1b32..354908f09 100644 --- a/crates/uv-dev/src/generate_json_schema.rs +++ b/crates/uv-dev/src/generate_json_schema.rs @@ -20,7 +20,7 @@ struct ToolUv { #[serde(flatten)] options: Options, #[serde(flatten)] - dep_spec: uv_requirements::pyproject::ToolUv, + dep_spec: uv_distribution::pyproject::ToolUv, } #[derive(clap::Args)] diff --git a/crates/uv-dispatch/Cargo.toml b/crates/uv-dispatch/Cargo.toml index c4fb3bafc..5142b695f 100644 --- a/crates/uv-dispatch/Cargo.toml +++ b/crates/uv-dispatch/Cargo.toml @@ -16,13 +16,14 @@ workspace = true [dependencies] distribution-types = { workspace = true } install-wheel-rs = { workspace = true } +pypi-types = { workspace = true } uv-build = { workspace = true } uv-cache = { workspace = true } uv-client = { workspace = true } uv-configuration = { workspace = true } +uv-distribution = { workspace = true } uv-installer = { workspace = true } uv-interpreter = { workspace = true } -uv-distribution = { workspace = true } uv-resolver = { workspace = true } uv-types = { workspace = true } diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index 62fb4e15e..2a1e222e7 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -11,12 +11,13 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use tracing::{debug, instrument}; -use distribution_types::{CachedDist, IndexLocations, Name, Requirement, Resolution, SourceDist}; +use distribution_types::{CachedDist, IndexLocations, Name, Resolution, SourceDist}; +use pypi_types::Requirement; use uv_build::{SourceBuild, SourceBuildContext}; use uv_cache::Cache; use uv_client::RegistryClient; -use uv_configuration::Concurrency; use uv_configuration::{BuildKind, ConfigSettings, NoBinary, NoBuild, Reinstall, SetupPyStrategy}; +use uv_configuration::{Concurrency, PreviewMode}; use uv_distribution::DistributionDatabase; use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages}; use 
uv_interpreter::{Interpreter, PythonEnvironment}; @@ -43,6 +44,7 @@ pub struct BuildDispatch<'a> { options: Options, build_extra_env_vars: FxHashMap<OsString, OsString>, concurrency: Concurrency, + preview_mode: PreviewMode, } impl<'a> BuildDispatch<'a> { @@ -62,6 +64,7 @@ impl<'a> BuildDispatch<'a> { no_build: &'a NoBuild, no_binary: &'a NoBinary, concurrency: Concurrency, + preview_mode: PreviewMode, ) -> Self { Self { client, @@ -81,6 +84,7 @@ impl<'a> BuildDispatch<'a> { source_build_context: SourceBuildContext::default(), options: Options::default(), build_extra_env_vars: FxHashMap::default(), + preview_mode, } } @@ -138,7 +142,12 @@ impl<'a> BuildContext for BuildDispatch<'a> { &HashStrategy::None, self, EmptyInstalledPackages, - DistributionDatabase::new(self.client, self, self.concurrency.downloads), + DistributionDatabase::new( + self.client, + self, + self.concurrency.downloads, + self.preview_mode, + ), )?; let graph = resolver.resolve().await.with_context(|| { format!( @@ -220,7 +229,12 @@ impl<'a> BuildContext for BuildDispatch<'a> { self.cache, tags, &HashStrategy::None, - DistributionDatabase::new(self.client, self, self.concurrency.downloads), + DistributionDatabase::new( + self.client, + self, + self.concurrency.downloads, + self.preview_mode, + ), ); debug!( diff --git a/crates/uv-distribution/Cargo.toml b/crates/uv-distribution/Cargo.toml index d3319a13d..089788318 100644 --- a/crates/uv-distribution/Cargo.toml +++ b/crates/uv-distribution/Cargo.toml @@ -23,27 +23,39 @@ platform-tags = { workspace = true } pypi-types = { workspace = true } uv-cache = { workspace = true } uv-client = { workspace = true } +uv-configuration = { workspace = true } uv-extract = { workspace = true } uv-fs = { workspace = true, features = ["tokio"] } uv-git = { workspace = true } uv-normalize = { workspace = true } uv-types = { workspace = true } -uv-configuration = { workspace = true } +uv-warnings = { workspace = true } anyhow = { workspace = true } fs-err = { workspace = true } futures = { workspace = true } +glob = { workspace = true } nanoid = { workspace = true } once_cell = { workspace = true } +path-absolutize = { workspace = true } reqwest = { workspace = true } reqwest-middleware = { workspace = true } rmp-serde = { workspace = true } rustc-hash = { workspace = true } +schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"] } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } tokio-util = { workspace = true, features = ["compat"] } +toml = { workspace = true } tracing = { workspace = true } url = { workspace = true } zip = { workspace = true } + +[dev-dependencies] +insta = { version = "1.39.0", features = ["filters", "json", "redactions"] } +regex = { workspace = true } + +[features] +schemars = ["dep:schemars"] diff --git a/crates/uv-distribution/src/distribution_database.rs b/crates/uv-distribution/src/distribution_database.rs index 92baac6b5..56f2ca1d2 100644 --- a/crates/uv-distribution/src/distribution_database.rs +++ b/crates/uv-distribution/src/distribution_database.rs @@ -25,7 +25,7 @@ use uv_cache::{ArchiveId, ArchiveTimestamp, CacheBucket, CacheEntry, Timestamp, use uv_client::{ CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient, }; -use uv_configuration::{NoBinary, NoBuild}; +use uv_configuration::{NoBinary, NoBuild, PreviewMode}; use uv_extract::hash::Hasher; use uv_fs::write_atomic; use uv_types::BuildContext; @@ -33,7 +33,7 @@ use 
crate::archive::Archive; use crate::locks::Locks; use crate::source::SourceDistributionBuilder; -use crate::{ArchiveMetadata, Error, LocalWheel, Reporter}; +use crate::{ArchiveMetadata, Error, LocalWheel, Metadata, Reporter}; /// A cached high-level interface to convert distributions (a requirement resolved to a location) /// to a wheel or wheel metadata. @@ -60,10 +60,11 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { client: &'a RegistryClient, build_context: &'a Context, concurrent_downloads: usize, + preview_mode: PreviewMode, ) -> Self { Self { build_context, - builder: SourceDistributionBuilder::new(build_context), + builder: SourceDistributionBuilder::new(build_context, preview_mode), locks: Rc::new(Locks::default()), client: ManagedClient::new(client, concurrent_downloads), reporter: None, @@ -364,7 +365,10 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let wheel = self.get_wheel(dist, hashes).await?; let metadata = wheel.metadata()?; let hashes = wheel.hashes; - return Ok(ArchiveMetadata { metadata, hashes }); + return Ok(ArchiveMetadata { + metadata: Metadata::from_metadata23(metadata), + hashes, + }); } let result = self @@ -373,7 +377,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { .await; match result { - Ok(metadata) => Ok(ArchiveMetadata::from(metadata)), + Ok(metadata) => Ok(ArchiveMetadata::from_metadata23(metadata)), Err(err) if err.is_http_streaming_unsupported() => { warn!("Streaming unsupported when fetching metadata for {dist}; downloading wheel directly ({err})"); @@ -382,7 +386,10 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let wheel = self.get_wheel(dist, hashes).await?; let metadata = wheel.metadata()?; let hashes = wheel.hashes; - Ok(ArchiveMetadata { metadata, hashes }) + Ok(ArchiveMetadata { + metadata: Metadata::from_metadata23(metadata), + hashes, + }) } Err(err) => Err(err.into()), } diff --git a/crates/uv-distribution/src/error.rs b/crates/uv-distribution/src/error.rs index 9353798db..b74b9215b 100644 --- a/crates/uv-distribution/src/error.rs +++ b/crates/uv-distribution/src/error.rs @@ -3,6 +3,7 @@ use std::path::PathBuf; use tokio::task::JoinError; use zip::result::ZipError; +use crate::MetadataLoweringError; use distribution_filename::WheelFilenameError; use pep440_rs::Version; use pypi_types::HashDigest; @@ -77,6 +78,8 @@ pub enum Error { DynamicPyprojectToml(#[source] pypi_types::MetadataError), #[error("Unsupported scheme in URL: {0}")] UnsupportedScheme(String), + #[error(transparent)] + MetadataLowering(#[from] MetadataLoweringError), /// A generic request middleware error happened while making a request. /// Refer to the error message for more details. 
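The other recurring change in this diff is that `DistributionDatabase` (and `BuildDispatch`) now carry a `PreviewMode`, which the source-distribution builder uses to gate `tool.uv.sources` lowering. A hypothetical helper mirroring the updated call sites (the names `client`, `context`, and `concurrency` are assumptions for illustration):

```rust
use uv_client::RegistryClient;
use uv_configuration::{Concurrency, PreviewMode};
use uv_distribution::DistributionDatabase;
use uv_types::BuildContext;

/// Construct a database the way the call sites updated in this diff do.
fn database<'a, T: BuildContext>(
    client: &'a RegistryClient,
    context: &'a T,
    concurrency: &Concurrency,
) -> DistributionDatabase<'a, T> {
    DistributionDatabase::new(
        client,
        context,
        concurrency.downloads,
        // Lowering of `tool.uv.sources` stays off unless the user opts in
        // via `--preview` / `UV_PREVIEW=1`.
        PreviewMode::Disabled,
    )
}
```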
diff --git a/crates/uv-distribution/src/lib.rs b/crates/uv-distribution/src/lib.rs index a9ceecac7..9440fa049 100644 --- a/crates/uv-distribution/src/lib.rs +++ b/crates/uv-distribution/src/lib.rs @@ -1,11 +1,21 @@ -pub use archive::Archive; +use std::collections::BTreeMap; +use std::path::Path; + +use thiserror::Error; + +use archive::Archive; pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalArchivePointer}; pub use download::LocalWheel; pub use error::Error; pub use git::{git_url_to_precise, is_same_reference}; pub use index::{BuiltWheelIndex, RegistryWheelIndex}; +use pep440_rs::{Version, VersionSpecifiers}; use pypi_types::{HashDigest, Metadata23}; pub use reporter::Reporter; +use requirement_lowering::{lower_requirement, LoweringError}; +use uv_configuration::PreviewMode; +use uv_normalize::{ExtraName, PackageName}; +pub use workspace::{ProjectWorkspace, Workspace, WorkspaceError, WorkspaceMember}; mod archive; mod distribution_database; @@ -14,20 +24,120 @@ mod error; mod git; mod index; mod locks; +pub mod pyproject; mod reporter; +mod requirement_lowering; mod source; +mod workspace; + +#[derive(Debug, Error)] +pub enum MetadataLoweringError { + #[error(transparent)] + Workspace(#[from] WorkspaceError), + #[error(transparent)] + Lowering(#[from] LoweringError), +} + +#[derive(Debug, Clone)] +pub struct Metadata { + // Mandatory fields + pub name: PackageName, + pub version: Version, + // Optional fields + pub requires_dist: Vec<pypi_types::Requirement>, + pub requires_python: Option<VersionSpecifiers>, + pub provides_extras: Vec<ExtraName>, +} + +impl Metadata { + /// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive + /// dependencies. + pub fn from_metadata23(metadata: Metadata23) -> Self { + Self { + name: metadata.name, + version: metadata.version, + requires_dist: metadata + .requires_dist + .into_iter() + .map(pypi_types::Requirement::from) + .collect(), + requires_python: metadata.requires_python, + provides_extras: metadata.provides_extras, + } + } + + /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory + /// dependencies. + pub async fn from_workspace( + metadata: Metadata23, + project_root: &Path, + preview_mode: PreviewMode, + ) -> Result<Self, MetadataLoweringError> { + // TODO(konsti): Limit discovery for Git checkouts to Git root. + // TODO(konsti): Cache workspace discovery. + let Some(project_workspace) = + ProjectWorkspace::from_maybe_project_root(project_root).await? + else { + return Ok(Self::from_metadata23(metadata)); + }; + + let empty = BTreeMap::default(); + let sources = project_workspace + .current_project() + .pyproject_toml() + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.sources.as_ref()) + .unwrap_or(&empty); + + let requires_dist = metadata + .requires_dist + .into_iter() + .map(|requirement| { + lower_requirement( + requirement, + &metadata.name, + project_workspace.project_root(), + sources, + project_workspace.workspace(), + preview_mode, + ) + }) + .collect::<Result<_, _>>()?; + + Ok(Self { + name: metadata.name, + version: metadata.version, + requires_dist, + requires_python: metadata.requires_python, + provides_extras: metadata.provides_extras, + }) + } +} /// The metadata associated with an archive. #[derive(Debug, Clone)] pub struct ArchiveMetadata { - /// The [`Metadata23`] for the underlying distribution. - pub metadata: Metadata23, + /// The [`Metadata`] for the underlying distribution. + pub metadata: Metadata, /// The hashes of the source or built archive. 
pub hashes: Vec<HashDigest>, } -impl From<Metadata23> for ArchiveMetadata { - fn from(metadata: Metadata23) -> Self { +impl ArchiveMetadata { + /// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive + /// dependencies. + pub fn from_metadata23(metadata: Metadata23) -> Self { + Self { + metadata: Metadata::from_metadata23(metadata), + hashes: vec![], + } + } +} + +impl From<Metadata> for ArchiveMetadata { + fn from(metadata: Metadata) -> Self { Self { metadata, hashes: vec![], diff --git a/crates/uv-distribution/src/pyproject.rs b/crates/uv-distribution/src/pyproject.rs new file mode 100644 index 000000000..2e157a8dc --- /dev/null +++ b/crates/uv-distribution/src/pyproject.rs @@ -0,0 +1,199 @@ +//! Reads the following fields from `pyproject.toml`: +//! +//! * `project.{dependencies,optional-dependencies}` +//! * `tool.uv.sources` +//! * `tool.uv.workspace` +//! +//! Then lowers them into a dependency specification. + +use std::collections::BTreeMap; +use std::ops::Deref; + +use glob::Pattern; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use url::Url; + +use pep508_rs::Pep508Error; +use pypi_types::VerbatimParsedUrl; +use uv_normalize::{ExtraName, PackageName}; + +use crate::LoweringError; + +#[derive(Debug, Error)] +pub enum Pep621Error { + #[error(transparent)] + Pep508(#[from] Box<Pep508Error<VerbatimParsedUrl>>), + #[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")] + MissingProjectSection, + #[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")] + DynamicNotAllowed(&'static str), + #[error("Failed to parse entry for: `{0}`")] + LoweringError(PackageName, #[source] LoweringError), +} + +impl From<Pep508Error<VerbatimParsedUrl>> for Pep621Error { + fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self { + Self::Pep508(Box::new(error)) + } +} + +/// A `pyproject.toml` as specified in PEP 517. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub struct PyProjectToml { + /// PEP 621-compliant project metadata. + pub project: Option<Project>, + /// Tool-specific metadata. + pub tool: Option<Tool>, +} + +/// PEP 621 project metadata (`project`). +/// +/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml/>. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub struct Project { + /// The name of the project + pub name: PackageName, + /// The optional dependencies of the project. + pub optional_dependencies: Option<BTreeMap<ExtraName, Vec<String>>>, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct Tool { + pub uv: Option<ToolUv>, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct ToolUv { + pub sources: Option<BTreeMap<PackageName, Source>>, + pub workspace: Option<ToolUvWorkspace>, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct ToolUvWorkspace { + pub members: Option<Vec<SerdePattern>>, + pub exclude: Option<Vec<SerdePattern>>, +} + +/// (De)serialize globs as strings. 
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +pub struct SerdePattern(#[serde(with = "serde_from_and_to_string")] pub Pattern); + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for SerdePattern { + fn schema_name() -> String { + <String as schemars::JsonSchema>::schema_name() + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + <String as schemars::JsonSchema>::json_schema(gen) + } +} + +impl Deref for SerdePattern { + type Target = Pattern; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// A `tool.uv.sources` value. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(untagged, deny_unknown_fields)] +pub enum Source { + /// A remote Git repository, available over HTTPS or SSH. + /// + /// Example: + /// ```toml + /// flask = { git = "https://github.com/pallets/flask", tag = "3.0.0" } + /// ``` + Git { + /// The repository URL (without the `git+` prefix). + git: Url, + /// The path to the directory with the `pyproject.toml`, if it's not in the archive root. + subdirectory: Option<String>, + // Only one of the three may be used; we'll validate this later and emit a custom error. + rev: Option<String>, + tag: Option<String>, + branch: Option<String>, + }, + /// A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution + /// (`.zip`, `.tar.gz`). + /// + /// Example: + /// ```toml + /// flask = { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl" } + /// ``` + Url { + url: Url, + /// For source distributions, the path to the directory with the `pyproject.toml`, if it's + /// not in the archive root. + subdirectory: Option<String>, + }, + /// The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or + /// `.tar.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or + /// `setup.py` file in the root). + Path { + path: String, + /// `false` by default. + editable: Option<bool>, + }, + /// A dependency pinned to a specific index, e.g., `torch` after setting `torch` to `https://download.pytorch.org/whl/cu118`. + Registry { + // TODO(konstin): The string is more-or-less a placeholder + index: String, + }, + /// A dependency on another package in the workspace. + Workspace { + /// When set to `false`, the package will be fetched from the remote index, rather than + /// included as a workspace package. + workspace: bool, + /// `true` by default. + editable: Option<bool>, + }, + /// A catch-all variant used to emit precise error messages when deserializing. + CatchAll { + git: String, + subdirectory: Option<String>, + rev: Option<String>, + tag: Option<String>, + branch: Option<String>, + url: String, + patch: String, + index: String, + workspace: bool, + }, +} +/// (De)serialize a type via its `FromStr` and `Display` implementations. +mod serde_from_and_to_string { + use std::fmt::Display; + use std::str::FromStr; + + use serde::{de, Deserialize, Deserializer, Serializer}; + + pub(super) fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error> + where + T: Display, + S: Serializer, + { + serializer.collect_str(value) + } + + pub(super) fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error> + where + T: FromStr, + T::Err: Display, + D: Deserializer<'de>, + { + String::deserialize(deserializer)? 
+ .parse() + .map_err(de::Error::custom) + } +} diff --git a/crates/uv-distribution/src/requirement_lowering.rs b/crates/uv-distribution/src/requirement_lowering.rs new file mode 100644 index 000000000..e50968d0e --- /dev/null +++ b/crates/uv-distribution/src/requirement_lowering.rs @@ -0,0 +1,241 @@ +use std::collections::BTreeMap; +use std::io; +use std::path::{Path, PathBuf}; + +use path_absolutize::Absolutize; +use thiserror::Error; +use url::Url; + +use pep440_rs::VersionSpecifiers; +use pep508_rs::{VerbatimUrl, VersionOrUrl}; +use pypi_types::{Requirement, RequirementSource, VerbatimParsedUrl}; +use uv_configuration::PreviewMode; +use uv_fs::Simplified; +use uv_git::GitReference; +use uv_normalize::PackageName; +use uv_warnings::warn_user_once; + +use crate::pyproject::Source; +use crate::Workspace; + +/// An error parsing and merging `tool.uv.sources` with +/// `project.{dependencies,optional-dependencies}`. +#[derive(Debug, Error)] +pub enum LoweringError { + #[error("Package is not included as workspace package in `tool.uv.workspace`")] + UndeclaredWorkspacePackage, + #[error("Can only specify one of: `rev`, `tag`, or `branch`")] + MoreThanOneGitRef, + #[error("Unable to combine options in `tool.uv.sources`")] + InvalidEntry, + #[error(transparent)] + InvalidUrl(#[from] url::ParseError), + #[error(transparent)] + InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError), + #[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")] + ConflictingUrls, + #[error("Could not normalize path: `{}`", _0.user_display())] + Absolutize(PathBuf, #[source] io::Error), + #[error("Fragments are not allowed in URLs: `{0}`")] + ForbiddenFragment(Url), + #[error("`workspace = false` is not yet supported")] + WorkspaceFalse, + #[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")] + MissingPreview, +} + +/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`. +pub(crate) fn lower_requirement( + requirement: pep508_rs::Requirement<VerbatimParsedUrl>, + project_name: &PackageName, + project_dir: &Path, + project_sources: &BTreeMap<PackageName, Source>, + workspace: &Workspace, + preview: PreviewMode, +) -> Result<Requirement, LoweringError> { + let source = project_sources + .get(&requirement.name) + .or(workspace.sources().get(&requirement.name)) + .cloned(); + + let workspace_package_declared = + // We require that when you use a package that's part of the workspace, ... + !workspace.packages().contains_key(&requirement.name) + // ... it must be declared as a workspace dependency (`workspace = true`), ... + || matches!( + source, + Some(Source::Workspace { + // By using toml, we technically support `workspace = false`. + workspace: true, + .. + }) + ) + // ... except for recursive self-inclusion (extras that activate other extras), e.g. + // `framework[machine_learning]` depends on `framework[cuda]`. + || &requirement.name == project_name; + if !workspace_package_declared { + return Err(LoweringError::UndeclaredWorkspacePackage); + } + + let Some(source) = source else { + let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty(); + // Support recursive editable inclusions. 
+ if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name + { + warn_user_once!( + "Missing version constraint (e.g., a lower bound) for `{}`", + requirement.name + ); + } + return Ok(Requirement::from(requirement)); + }; + + if preview.is_disabled() { + return Err(LoweringError::MissingPreview); + } + + let source = match source { + Source::Git { + git, + subdirectory, + rev, + tag, + branch, + } => { + if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { + return Err(LoweringError::ConflictingUrls); + } + let reference = match (rev, tag, branch) { + (None, None, None) => GitReference::DefaultBranch, + (Some(rev), None, None) => { + if rev.starts_with("refs/") { + GitReference::NamedRef(rev.clone()) + } else if rev.len() == 40 { + GitReference::FullCommit(rev.clone()) + } else { + GitReference::ShortCommit(rev.clone()) + } + } + (None, Some(tag), None) => GitReference::Tag(tag), + (None, None, Some(branch)) => GitReference::Branch(branch), + _ => return Err(LoweringError::MoreThanOneGitRef), + }; + + // Create a PEP 508-compatible URL. + let mut url = Url::parse(&format!("git+{git}"))?; + if let Some(rev) = reference.as_str() { + url.set_path(&format!("{}@{}", url.path(), rev)); + } + if let Some(subdirectory) = &subdirectory { + url.set_fragment(Some(&format!("subdirectory={subdirectory}"))); + } + let url = VerbatimUrl::from_url(url); + + let repository = git.clone(); + + RequirementSource::Git { + url, + repository, + reference, + precise: None, + subdirectory: subdirectory.map(PathBuf::from), + } + } + Source::Url { url, subdirectory } => { + if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { + return Err(LoweringError::ConflictingUrls); + } + + let mut verbatim_url = url.clone(); + if verbatim_url.fragment().is_some() { + return Err(LoweringError::ForbiddenFragment(url)); + } + if let Some(subdirectory) = &subdirectory { + verbatim_url.set_fragment(Some(subdirectory)); + } + + let verbatim_url = VerbatimUrl::from_url(verbatim_url); + RequirementSource::Url { + location: url, + subdirectory: subdirectory.map(PathBuf::from), + url: verbatim_url, + } + } + Source::Path { path, editable } => { + if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { + return Err(LoweringError::ConflictingUrls); + } + path_source(path, project_dir, editable.unwrap_or(false))? + } + Source::Registry { index } => match requirement.version_or_url { + None => { + warn_user_once!( + "Missing version constraint (e.g., a lower bound) for `{}`", + requirement.name + ); + RequirementSource::Registry { + specifier: VersionSpecifiers::empty(), + index: Some(index), + } + } + Some(VersionOrUrl::VersionSpecifier(version)) => RequirementSource::Registry { + specifier: version, + index: Some(index), + }, + Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls), + }, + Source::Workspace { + workspace: is_workspace, + editable, + } => { + if !is_workspace { + return Err(LoweringError::WorkspaceFalse); + } + if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { + return Err(LoweringError::ConflictingUrls); + } + let path = workspace + .packages() + .get(&requirement.name) + .ok_or(LoweringError::UndeclaredWorkspacePackage)? + .clone(); + path_source(path.root(), workspace.root(), editable.unwrap_or(true))? + } + Source::CatchAll { .. } => { + // Emit a dedicated error message, which is an improvement over Serde's default error. 
+ return Err(LoweringError::InvalidEntry); + } + }; + Ok(Requirement { + name: requirement.name, + extras: requirement.extras, + marker: requirement.marker, + source, + origin: requirement.origin, + }) +} + +/// Convert a path string to a path section. +fn path_source( + path: impl AsRef<Path>, + project_dir: &Path, + editable: bool, +) -> Result<RequirementSource, LoweringError> { + let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)? + .with_given(path.as_ref().to_string_lossy().to_string()); + let path_buf = path.as_ref().to_path_buf(); + let path_buf = path_buf + .absolutize_from(project_dir) + .map_err(|err| LoweringError::Absolutize(path.as_ref().to_path_buf(), err))? + .to_path_buf(); + //if !editable { + // // TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't + // // support `{ workspace = true, editable = false }` since we only collect editables. + // return Err(LoweringError::NonEditableWorkspaceDependency); + //} + Ok(RequirementSource::Path { + path: path_buf, + url, + editable, + }) +} diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index ef7f54198..04665a24a 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -29,7 +29,7 @@ use uv_cache::{ use uv_client::{ CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient, }; -use uv_configuration::{BuildKind, NoBuild}; +use uv_configuration::{BuildKind, NoBuild, PreviewMode}; use uv_extract::hash::Hasher; use uv_fs::{write_atomic, LockedFile}; use uv_types::{BuildContext, SourceBuildTrait}; @@ -39,7 +39,7 @@ use crate::error::Error; use crate::git::{fetch_git_archive, resolve_precise}; use crate::source::built_wheel_metadata::BuiltWheelMetadata; use crate::source::revision::Revision; -use crate::{ArchiveMetadata, Reporter}; +use crate::{ArchiveMetadata, Metadata, Reporter}; mod built_wheel_metadata; mod revision; @@ -48,6 +48,7 @@ mod revision; pub(crate) struct SourceDistributionBuilder<'a, T: BuildContext> { build_context: &'a T, reporter: Option<Arc<dyn Reporter>>, + preview_mode: PreviewMode, } /// The name of the file that contains the revision ID for a remote distribution, encoded via `MsgPack`. @@ -61,10 +62,11 @@ pub(crate) const METADATA: &str = "metadata.msgpack"; impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { /// Initialize a [`SourceDistributionBuilder`] from a [`BuildContext`]. - pub(crate) fn new(build_context: &'a T) -> Self { + pub(crate) fn new(build_context: &'a T, preview_mode: PreviewMode) -> Self { Self { build_context, reporter: None, + preview_mode, } } @@ -492,7 +494,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { if let Some(metadata) = read_cached_metadata(&metadata_entry).await? { debug!("Using cached metadata for: {source}"); return Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }); } @@ -515,7 +517,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .map_err(Error::CacheWrite)?; return Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }); } @@ -542,7 +544,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }) } @@ -720,7 +722,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { if let Some(metadata) = read_cached_metadata(&metadata_entry).await? 
{ debug!("Using cached metadata for: {source}"); return Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }); } @@ -742,7 +744,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .map_err(Error::CacheWrite)?; return Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }); } @@ -769,7 +771,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .map_err(Error::CacheWrite)?; Ok(ArchiveMetadata { - metadata, + metadata: Metadata::from_metadata23(metadata), hashes: revision.into_hashes(), }) } @@ -929,7 +931,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { let metadata_entry = cache_shard.entry(METADATA); if let Some(metadata) = read_cached_metadata(&metadata_entry).await? { debug!("Using cached metadata for: {source}"); - return Ok(ArchiveMetadata::from(metadata)); + return Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode) + .await?, + )); } // If the backend supports `prepare_metadata_for_build_wheel`, use it. @@ -946,7 +951,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .await .map_err(Error::CacheWrite)?; - return Ok(ArchiveMetadata::from(metadata)); + return Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode) + .await?, + )); } // Otherwise, we need to build a wheel. @@ -970,7 +978,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .await .map_err(Error::CacheWrite)?; - Ok(ArchiveMetadata::from(metadata)) + Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode).await?, + )) } /// Return the [`Revision`] for a local source tree, refreshing it if necessary. @@ -1137,7 +1147,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { { if let Some(metadata) = read_cached_metadata(&metadata_entry).await? { debug!("Using cached metadata for: {source}"); - return Ok(ArchiveMetadata::from(metadata)); + return Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?, + )); } } @@ -1155,7 +1167,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .await .map_err(Error::CacheWrite)?; - return Ok(ArchiveMetadata::from(metadata)); + return Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?, + )); } // Otherwise, we need to build a wheel. @@ -1179,7 +1193,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .await .map_err(Error::CacheWrite)?; - Ok(ArchiveMetadata::from(metadata)) + Ok(ArchiveMetadata::from( + Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?, + )) } /// Download and unzip a source distribution into the cache from an HTTP response. @@ -1592,7 +1608,7 @@ async fn read_pyproject_toml( /// Read an existing cached [`Metadata23`], if it exists. 
async fn read_cached_metadata(cache_entry: &CacheEntry) -> Result<Option<Metadata23>, Error> { match fs::read(&cache_entry.path()).await { - Ok(cached) => Ok(Some(rmp_serde::from_slice::<Metadata23>(&cached)?)), + Ok(cached) => Ok(Some(rmp_serde::from_slice(&cached)?)), Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None), Err(err) => Err(Error::CacheRead(err)), } diff --git a/crates/uv-requirements/src/workspace.rs b/crates/uv-distribution/src/workspace.rs similarity index 98% rename from crates/uv-requirements/src/workspace.rs rename to crates/uv-distribution/src/workspace.rs index 94a48925a..8a4774fd2 100644 --- a/crates/uv-requirements/src/workspace.rs +++ b/crates/uv-distribution/src/workspace.rs @@ -3,12 +3,12 @@ use std::collections::BTreeMap; use std::path::{Path, PathBuf}; -use distribution_types::{Requirement, RequirementSource}; use glob::{glob, GlobError, PatternError}; -use pep508_rs::{VerbatimUrl, VerbatimUrlError}; use rustc_hash::FxHashSet; use tracing::{debug, trace}; +use pep508_rs::VerbatimUrl; +use pypi_types::{Requirement, RequirementSource}; use uv_fs::{absolutize_path, Simplified}; use uv_normalize::{ExtraName, PackageName}; use uv_warnings::warn_user; @@ -29,12 +29,8 @@ pub enum WorkspaceError { Toml(PathBuf, #[source] Box<toml::de::Error>), #[error("No `project` table found in: `{}`", _0.simplified_display())] MissingProject(PathBuf), - #[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")] - DynamicNotAllowed(&'static str), #[error("Failed to normalize workspace member path")] Normalize(#[source] std::io::Error), - #[error("Failed to normalize workspace member path")] - VerbatimUrl(#[from] VerbatimUrlError), } /// A workspace, consisting of a root directory and members. See [`ProjectWorkspace`]. @@ -371,6 +367,7 @@ impl ProjectWorkspace { let mut seen = FxHashSet::default(); for member_glob in workspace_definition.members.unwrap_or_default() { let absolute_glob = workspace_root + .simplified() .join(member_glob.as_str()) .to_string_lossy() .to_string(); @@ -427,8 +424,8 @@ impl ProjectWorkspace { }) } - #[cfg(test)] - pub(crate) fn dummy(root: &Path, project_name: &PackageName) -> Self { + /// Used in tests. + pub fn dummy(root: &Path, project_name: &PackageName) -> Self { // This doesn't necessarily match the exact test case, but we don't use the other fields // for the test cases atm. let root_member = WorkspaceMember { @@ -436,9 +433,7 @@ impl ProjectWorkspace { pyproject_toml: PyProjectToml { project: Some(crate::pyproject::Project { name: project_name.clone(), - dependencies: None, optional_dependencies: None, - dynamic: None, }), tool: None, }, @@ -605,6 +600,7 @@ fn is_excluded_from_workspace( ) -> Result<bool, WorkspaceError> { for exclude_glob in workspace.exclude.iter().flatten() { let absolute_glob = workspace_root + .simplified() .join(exclude_glob.as_str()) .to_string_lossy() .to_string(); diff --git a/crates/uv-fs/src/lib.rs b/crates/uv-fs/src/lib.rs index c98967855..01a1615da 100644 --- a/crates/uv-fs/src/lib.rs +++ b/crates/uv-fs/src/lib.rs @@ -12,23 +12,6 @@ pub use crate::path::*; pub mod cachedir; mod path; -/// Reads data from the path and requires that it be valid UTF-8. -/// -/// If the file path is `-`, then contents are read from stdin instead. 
-#[cfg(feature = "tokio")] -pub async fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> { - use std::io::Read; - - let path = path.as_ref(); - if path == Path::new("-") { - let mut buf = String::with_capacity(1024); - std::io::stdin().read_to_string(&mut buf)?; - Ok(buf) - } else { - fs_err::tokio::read_to_string(path).await - } -} - /// Reads data from the path and requires that it be valid UTF-8 or UTF-16. /// /// This uses BOM sniffing to determine if the data should be transcoded diff --git a/crates/uv-installer/src/plan.rs b/crates/uv-installer/src/plan.rs index d14420100..6f7e515ec 100644 --- a/crates/uv-installer/src/plan.rs +++ b/crates/uv-installer/src/plan.rs @@ -11,9 +11,10 @@ use distribution_filename::WheelFilename; use distribution_types::{ CachedDirectUrlDist, CachedDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Error, GitSourceDist, Hashed, IndexLocations, InstalledDist, Name, PathBuiltDist, - PathSourceDist, RemoteSource, Requirement, RequirementSource, Verbatim, + PathSourceDist, RemoteSource, Verbatim, }; use platform_tags::Tags; +use pypi_types::{Requirement, RequirementSource}; use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, WheelCache}; use uv_configuration::{NoBinary, Reinstall}; use uv_distribution::{ diff --git a/crates/uv-installer/src/satisfies.rs b/crates/uv-installer/src/satisfies.rs index 4f807334d..b77ae3a4a 100644 --- a/crates/uv-installer/src/satisfies.rs +++ b/crates/uv-installer/src/satisfies.rs @@ -6,8 +6,8 @@ use serde::Deserialize; use tracing::{debug, trace}; use cache_key::{CanonicalUrl, RepositoryUrl}; -use distribution_types::{InstalledDirectUrlDist, InstalledDist, RequirementSource}; -use pypi_types::{DirInfo, DirectUrl, VcsInfo, VcsKind}; +use distribution_types::{InstalledDirectUrlDist, InstalledDist}; +use pypi_types::{DirInfo, DirectUrl, RequirementSource, VcsInfo, VcsKind}; use uv_cache::{ArchiveTarget, ArchiveTimestamp}; #[derive(Debug, Copy, Clone)] diff --git a/crates/uv-installer/src/site_packages.rs b/crates/uv-installer/src/site_packages.rs index dfa337c2e..86f6d7768 100644 --- a/crates/uv-installer/src/site_packages.rs +++ b/crates/uv-installer/src/site_packages.rs @@ -8,11 +8,10 @@ use rustc_hash::{FxHashMap, FxHashSet}; use url::Url; use distribution_types::{ - Diagnostic, InstalledDist, Name, Requirement, UnresolvedRequirement, - UnresolvedRequirementSpecification, + Diagnostic, InstalledDist, Name, UnresolvedRequirement, UnresolvedRequirementSpecification, }; use pep440_rs::{Version, VersionSpecifiers}; -use pypi_types::VerbatimParsedUrl; +use pypi_types::{Requirement, VerbatimParsedUrl}; use uv_interpreter::PythonEnvironment; use uv_normalize::PackageName; use uv_types::InstalledPackagesProvider; diff --git a/crates/uv-requirements/Cargo.toml b/crates/uv-requirements/Cargo.toml index 593a3c9f3..fe2c00e02 100644 --- a/crates/uv-requirements/Cargo.toml +++ b/crates/uv-requirements/Cargo.toml @@ -13,7 +13,6 @@ license.workspace = true cache-key = { workspace = true } distribution-filename = { workspace = true } distribution-types = { workspace = true } -pep440_rs = { workspace = true } pep508_rs = { workspace = true } pypi-types = { workspace = true } requirements-txt = { workspace = true, features = ["http"] } @@ -34,26 +33,12 @@ console = { workspace = true } ctrlc = { workspace = true } fs-err = { workspace = true, features = ["tokio"] } futures = { workspace = true } -glob = { workspace = true } -indexmap = { workspace = true } -path-absolutize = { workspace = true } rustc-hash = { workspace = true 
} -same-file = { workspace = true } -schemars = { workspace = true, optional = true } serde = { workspace = true } thiserror = { workspace = true } toml = { workspace = true } tracing = { workspace = true } url = { workspace = true } -[features] -schemars = ["dep:schemars"] - -[dev-dependencies] -indoc = "2.0.5" -insta = { version = "1.38.0", features = ["filters", "redactions", "json"] } -regex = { workspace = true } -tokio = { workspace = true } - [lints] workspace = true diff --git a/crates/uv-requirements/src/lib.rs b/crates/uv-requirements/src/lib.rs index ec5c188ca..2a582bc06 100644 --- a/crates/uv-requirements/src/lib.rs +++ b/crates/uv-requirements/src/lib.rs @@ -3,14 +3,11 @@ pub use crate::source_tree::*; pub use crate::sources::*; pub use crate::specification::*; pub use crate::unnamed::*; -pub use crate::workspace::*; mod confirm; mod lookahead; -pub mod pyproject; mod source_tree; mod sources; mod specification; mod unnamed; pub mod upgrade; -mod workspace; diff --git a/crates/uv-requirements/src/lookahead.rs b/crates/uv-requirements/src/lookahead.rs index b4c5d3547..65d5f78a4 100644 --- a/crates/uv-requirements/src/lookahead.rs +++ b/crates/uv-requirements/src/lookahead.rs @@ -6,11 +6,9 @@ use rustc_hash::FxHashSet; use thiserror::Error; use tracing::trace; -use distribution_types::{ - BuiltDist, Dist, DistributionMetadata, GitSourceDist, Requirement, RequirementSource, - SourceDist, -}; +use distribution_types::{BuiltDist, Dist, DistributionMetadata, GitSourceDist, SourceDist}; use pep508_rs::MarkerEnvironment; +use pypi_types::{Requirement, RequirementSource}; use uv_configuration::{Constraints, Overrides}; use uv_distribution::{DistributionDatabase, Reporter}; use uv_git::GitUrl; diff --git a/crates/uv-requirements/src/pyproject.rs b/crates/uv-requirements/src/pyproject.rs deleted file mode 100644 index 13b13d0ed..000000000 --- a/crates/uv-requirements/src/pyproject.rs +++ /dev/null @@ -1,957 +0,0 @@ -//! Reads the following fields from `pyproject.toml`: -//! -//! * `project.{dependencies,optional-dependencies}` -//! * `tool.uv.sources` -//! * `tool.uv.workspace` -//! -//! Then lowers them into a dependency specification. 
- -use std::collections::BTreeMap; -use std::io; -use std::ops::Deref; -use std::path::{Path, PathBuf}; -use std::str::FromStr; - -use glob::Pattern; -use indexmap::IndexMap; -use path_absolutize::Absolutize; -use rustc_hash::FxHashSet; -use serde::{Deserialize, Serialize}; -use thiserror::Error; -use url::Url; - -use distribution_types::{Requirement, RequirementSource, Requirements}; -use pep440_rs::VersionSpecifiers; -use pep508_rs::{Pep508Error, RequirementOrigin, VerbatimUrl, VersionOrUrl}; -use pypi_types::VerbatimParsedUrl; -use uv_configuration::{ExtrasSpecification, PreviewMode}; -use uv_fs::Simplified; -use uv_git::GitReference; -use uv_normalize::{ExtraName, PackageName}; -use uv_warnings::warn_user_once; - -use crate::Workspace; - -#[derive(Debug, Error)] -pub enum Pep621Error { - #[error(transparent)] - Pep508(#[from] Box<Pep508Error<VerbatimParsedUrl>>), - #[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")] - MissingProjectSection, - #[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")] - DynamicNotAllowed(&'static str), - #[error("Failed to parse entry for: `{0}`")] - LoweringError(PackageName, #[source] LoweringError), -} - -impl From<Pep508Error<VerbatimParsedUrl>> for Pep621Error { - fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self { - Self::Pep508(Box::new(error)) - } -} - -/// An error parsing and merging `tool.uv.sources` with -/// `project.{dependencies,optional-dependencies}`. -#[derive(Debug, Error)] -pub enum LoweringError { - #[error("Unsupported path (can't convert to URL): `{}`", _0.user_display())] - PathToUrl(PathBuf), - #[error("Package is not included as workspace package in `tool.uv.workspace`")] - UndeclaredWorkspacePackage, - #[error("Can only specify one of rev, tag, or branch")] - MoreThanOneGitRef, - #[error("Unable to combine options in `tool.uv.sources`")] - InvalidEntry, - #[error(transparent)] - InvalidUrl(#[from] url::ParseError), - #[error(transparent)] - InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError), - #[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")] - ConflictingUrls, - #[error("Could not normalize path: `{}`", _0.user_display())] - AbsolutizeError(PathBuf, #[source] io::Error), - #[error("Fragments are not allowed in URLs: `{0}`")] - ForbiddenFragment(Url), - #[error("`workspace = false` is not yet supported")] - WorkspaceFalse, - #[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")] - MissingPreview, - #[error("`editable = false` is not yet supported")] - NonEditableWorkspaceDependency, -} - -/// A `pyproject.toml` as specified in PEP 517. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "kebab-case")] -pub struct PyProjectToml { - /// PEP 621-compliant project metadata. - pub project: Option<Project>, - /// Tool-specific metadata. - pub tool: Option<Tool>, -} - -/// PEP 621 project metadata (`project`). -/// -/// This is a subset of the full metadata specification, and only includes the fields that are -/// relevant for extracting static requirements. -/// -/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml/>. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "kebab-case")] -pub struct Project { - /// The name of the project - pub name: PackageName, - /// Project dependencies - pub dependencies: Option<Vec<String>>, - /// Optional dependencies - pub optional_dependencies: Option<IndexMap<ExtraName, Vec<String>>>, - /// Specifies which fields listed by PEP 621 were intentionally unspecified - /// so another tool can/will provide such metadata dynamically. 
- pub dynamic: Option>, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -pub struct Tool { - pub uv: Option, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -pub struct ToolUv { - pub sources: Option>, - pub workspace: Option, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -pub struct ToolUvWorkspace { - pub members: Option>, - pub exclude: Option>, -} - -/// (De)serialize globs as strings. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -pub struct SerdePattern(#[serde(with = "serde_from_and_to_string")] pub Pattern); - -#[cfg(feature = "schemars")] -impl schemars::JsonSchema for SerdePattern { - fn schema_name() -> String { - ::schema_name() - } - - fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { - ::json_schema(gen) - } -} - -impl Deref for SerdePattern { - type Target = Pattern; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -/// A `tool.uv.sources` value. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -#[serde(untagged, deny_unknown_fields)] -pub enum Source { - /// A remote Git repository, available over HTTPS or SSH. - /// - /// Example: - /// ```toml - /// flask = { git = "https://github.com/pallets/flask", tag = "3.0.0" } - /// ``` - Git { - /// The repository URL (without the `git+` prefix). - git: Url, - /// The path to the directory with the `pyproject.toml`, if it's not in the archive root. - subdirectory: Option, - // Only one of the three may be used; we'll validate this later and emit a custom error. - rev: Option, - tag: Option, - branch: Option, - }, - /// A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution - /// (`.zip`, `.tar.gz`). - /// - /// Example: - /// ```toml - /// flask = { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl" } - /// ``` - Url { - url: Url, - /// For source distributions, the path to the directory with the `pyproject.toml`, if it's - /// not in the archive root. - subdirectory: Option, - }, - /// The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or - /// `.tag.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or - /// `setup.py` file in the root). - Path { - path: String, - /// `false` by default. - editable: Option, - }, - /// A dependency pinned to a specific index, e.g., `torch` after setting `torch` to `https://download.pytorch.org/whl/cu118`. - Registry { - // TODO(konstin): The string is more-or-less a placeholder - index: String, - }, - /// A dependency on another package in the workspace. - Workspace { - /// When set to `false`, the package will be fetched from the remote index, rather than - /// included as a workspace package. - workspace: bool, - /// `true` by default. - editable: Option, - }, - /// A catch-all variant used to emit precise error messages when deserializing. 
- CatchAll { - git: String, - subdirectory: Option, - rev: Option, - tag: Option, - branch: Option, - url: String, - patch: String, - index: String, - workspace: bool, - }, -} - -/// The PEP 621 project metadata, with static requirements extracted in advance, joined -/// with `tool.uv.sources`. -#[derive(Debug)] -pub(crate) struct Pep621Metadata { - /// The name of the project. - pub(crate) name: PackageName, - /// The requirements extracted from the project. - pub(crate) requirements: Vec, - /// The extras used to collect requirements. - pub(crate) used_extras: FxHashSet, -} - -impl Pep621Metadata { - /// Extract the static [`Pep621Metadata`] from a [`Project`] and [`ExtrasSpecification`], if - /// possible. - /// - /// If the project specifies dynamic dependencies, or if the project specifies dynamic optional - /// dependencies and the extras are requested, the requirements cannot be extracted. - /// - /// Returns an error if the requirements are not valid PEP 508 requirements. - pub(crate) fn try_from( - pyproject: &PyProjectToml, - extras: &ExtrasSpecification, - pyproject_path: &Path, - project_dir: &Path, - workspace: &Workspace, - preview: PreviewMode, - ) -> Result, Pep621Error> { - let project_sources = pyproject - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.sources.clone()); - - let has_sources = project_sources.is_some() || !workspace.sources().is_empty(); - - let Some(project) = &pyproject.project else { - return if has_sources { - Err(Pep621Error::MissingProjectSection) - } else { - Ok(None) - }; - }; - if let Some(dynamic) = project.dynamic.as_ref() { - // If the project specifies dynamic dependencies, we can't extract the requirements. - if dynamic.iter().any(|field| field == "dependencies") { - return if has_sources { - Err(Pep621Error::DynamicNotAllowed("project.dependencies")) - } else { - Ok(None) - }; - } - // If we requested extras, and the project specifies dynamic optional dependencies, we can't - // extract the requirements. - if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") { - return if has_sources { - Err(Pep621Error::DynamicNotAllowed( - "project.optional-dependencies", - )) - } else { - Ok(None) - }; - } - } - - let requirements = lower_requirements( - project.dependencies.as_deref(), - project.optional_dependencies.as_ref(), - pyproject_path, - &project.name, - project_dir, - &project_sources.unwrap_or_default(), - workspace, - preview, - )?; - - // Parse out the project requirements. - let mut requirements_with_extras = requirements.dependencies; - - // Include any optional dependencies specified in `extras`. - let mut used_extras = FxHashSet::default(); - if !extras.is_empty() { - // Include the optional dependencies if the extras are requested. 
- for (extra, optional_requirements) in &requirements.optional_dependencies { - if extras.contains(extra) { - used_extras.insert(extra.clone()); - requirements_with_extras.extend(flatten_extra( - &project.name, - optional_requirements, - &requirements.optional_dependencies, - )); - } - } - } - - Ok(Some(Self { - name: project.name.clone(), - requirements: requirements_with_extras, - used_extras, - })) - } -} - -#[allow(clippy::too_many_arguments)] -pub(crate) fn lower_requirements( - dependencies: Option<&[String]>, - optional_dependencies: Option<&IndexMap>>, - pyproject_path: &Path, - project_name: &PackageName, - project_dir: &Path, - project_sources: &BTreeMap, - workspace: &Workspace, - preview: PreviewMode, -) -> Result { - let dependencies = dependencies - .into_iter() - .flatten() - .map(|dependency| { - let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin( - RequirementOrigin::Project(pyproject_path.to_path_buf(), project_name.clone()), - ); - let name = requirement.name.clone(); - lower_requirement( - requirement, - project_name, - project_dir, - project_sources, - workspace, - preview, - ) - .map_err(|err| Pep621Error::LoweringError(name, err)) - }) - .collect::>()?; - let optional_dependencies = optional_dependencies - .into_iter() - .flatten() - .map(|(extra_name, dependencies)| { - let dependencies: Vec<_> = dependencies - .iter() - .map(|dependency| { - let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin( - RequirementOrigin::Project( - pyproject_path.to_path_buf(), - project_name.clone(), - ), - ); - let name = requirement.name.clone(); - lower_requirement( - requirement, - project_name, - project_dir, - project_sources, - workspace, - preview, - ) - .map_err(|err| Pep621Error::LoweringError(name, err)) - }) - .collect::>()?; - Ok((extra_name.clone(), dependencies)) - }) - .collect::>()?; - Ok(Requirements { - dependencies, - optional_dependencies, - }) -} - -/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`. -pub(crate) fn lower_requirement( - requirement: pep508_rs::Requirement, - project_name: &PackageName, - project_dir: &Path, - project_sources: &BTreeMap, - workspace: &Workspace, - preview: PreviewMode, -) -> Result { - let source = project_sources - .get(&requirement.name) - .or(workspace.sources().get(&requirement.name)) - .cloned(); - - let workspace_package_declared = - // We require that when you use a package that's part of the workspace, ... - !workspace.packages().contains_key(&requirement.name) - // ... it must be declared as a workspace dependency (`workspace = true`), ... - || matches!( - source, - Some(Source::Workspace { - // By using toml, we technically support `workspace = false`. - workspace: true, - .. - }) - ) - // ... except for recursive self-inclusion (extras that activate other extras), e.g. - // `framework[machine_learning]` depends on `framework[cuda]`. - || &requirement.name == project_name; - if !workspace_package_declared { - return Err(LoweringError::UndeclaredWorkspacePackage); - } - - let Some(source) = source else { - let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty(); - // Support recursive editable inclusions. 
- if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name - { - warn_user_once!( - "Missing version constraint (e.g., a lower bound) for `{}`", - requirement.name - ); - } - return Ok(Requirement::from(requirement)); - }; - - if preview.is_disabled() { - return Err(LoweringError::MissingPreview); - } - - let source = match source { - Source::Git { - git, - subdirectory, - rev, - tag, - branch, - } => { - if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { - return Err(LoweringError::ConflictingUrls); - } - let reference = match (rev, tag, branch) { - (None, None, None) => GitReference::DefaultBranch, - (Some(rev), None, None) => { - if rev.starts_with("refs/") { - GitReference::NamedRef(rev.clone()) - } else if rev.len() == 40 { - GitReference::FullCommit(rev.clone()) - } else { - GitReference::ShortCommit(rev.clone()) - } - } - (None, Some(tag), None) => GitReference::Tag(tag), - (None, None, Some(branch)) => GitReference::Branch(branch), - _ => return Err(LoweringError::MoreThanOneGitRef), - }; - - // Create a PEP 508-compatible URL. - let mut url = Url::parse(&format!("git+{git}"))?; - if let Some(rev) = reference.as_str() { - url.set_path(&format!("{}@{}", url.path(), rev)); - } - if let Some(subdirectory) = &subdirectory { - url.set_fragment(Some(&format!("subdirectory={subdirectory}"))); - } - let url = VerbatimUrl::from_url(url); - - let repository = git.clone(); - - RequirementSource::Git { - url, - repository, - reference, - precise: None, - subdirectory: subdirectory.map(PathBuf::from), - } - } - Source::Url { url, subdirectory } => { - if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { - return Err(LoweringError::ConflictingUrls); - } - - let mut verbatim_url = url.clone(); - if verbatim_url.fragment().is_some() { - return Err(LoweringError::ForbiddenFragment(url)); - } - if let Some(subdirectory) = &subdirectory { - verbatim_url.set_fragment(Some(subdirectory)); - } - - let verbatim_url = VerbatimUrl::from_url(verbatim_url); - RequirementSource::Url { - location: url, - subdirectory: subdirectory.map(PathBuf::from), - url: verbatim_url, - } - } - Source::Path { path, editable } => { - if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { - return Err(LoweringError::ConflictingUrls); - } - path_source(path, project_dir, editable.unwrap_or(false))? - } - Source::Registry { index } => match requirement.version_or_url { - None => { - warn_user_once!( - "Missing version constraint (e.g., a lower bound) for `{}`", - requirement.name - ); - RequirementSource::Registry { - specifier: VersionSpecifiers::empty(), - index: Some(index), - } - } - Some(VersionOrUrl::VersionSpecifier(version)) => RequirementSource::Registry { - specifier: version, - index: Some(index), - }, - Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls), - }, - Source::Workspace { - workspace: is_workspace, - editable, - } => { - if !is_workspace { - return Err(LoweringError::WorkspaceFalse); - } - if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) { - return Err(LoweringError::ConflictingUrls); - } - let path = workspace - .packages() - .get(&requirement.name) - .ok_or(LoweringError::UndeclaredWorkspacePackage)? - .clone(); - path_source(path.root(), workspace.root(), editable.unwrap_or(true))? - } - Source::CatchAll { .. } => { - // Emit a dedicated error message, which is an improvement over Serde's default error. 
- return Err(LoweringError::InvalidEntry); - } - }; - Ok(Requirement { - name: requirement.name, - extras: requirement.extras, - marker: requirement.marker, - source, - origin: requirement.origin, - }) -} - -/// Convert a path string to a path section. -fn path_source( - path: impl AsRef, - project_dir: &Path, - editable: bool, -) -> Result { - let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)? - .with_given(path.as_ref().to_string_lossy().to_string()); - let path_buf = path.as_ref().to_path_buf(); - let path_buf = path_buf - .absolutize_from(project_dir) - .map_err(|err| LoweringError::AbsolutizeError(path.as_ref().to_path_buf(), err))? - .to_path_buf(); - if !editable { - // TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't - // support `{ workspace = true, editable = false }` since we only collect editables. - return Err(LoweringError::NonEditableWorkspaceDependency); - } - Ok(RequirementSource::Path { - path: path_buf, - url, - editable, - }) -} - -/// Given an extra in a project that may contain references to the project itself, flatten it into -/// a list of requirements. -/// -/// For example: -/// ```toml -/// [project] -/// name = "my-project" -/// version = "0.0.1" -/// dependencies = [ -/// "tomli", -/// ] -/// -/// [project.optional-dependencies] -/// test = [ -/// "pep517", -/// ] -/// dev = [ -/// "my-project[test]", -/// ] -/// ``` -fn flatten_extra( - project_name: &PackageName, - requirements: &[Requirement], - extras: &IndexMap>, -) -> Vec { - fn inner( - project_name: &PackageName, - requirements: &[Requirement], - extras: &IndexMap>, - seen: &mut FxHashSet, - ) -> Vec { - let mut flattened = Vec::with_capacity(requirements.len()); - for requirement in requirements { - if requirement.name == *project_name { - for extra in &requirement.extras { - // Avoid infinite recursion on mutually recursive extras. - if !seen.insert(extra.clone()) { - continue; - } - - // Flatten the extra requirements. - for (other_extra, extra_requirements) in extras { - if other_extra == extra { - flattened.extend(inner(project_name, extra_requirements, extras, seen)); - } - } - } - } else { - flattened.push(requirement.clone()); - } - } - flattened - } - - inner( - project_name, - requirements, - extras, - &mut FxHashSet::default(), - ) -} - -/// -mod serde_from_and_to_string { - use std::fmt::Display; - use std::str::FromStr; - - use serde::{de, Deserialize, Deserializer, Serializer}; - - pub(super) fn serialize(value: &T, serializer: S) -> Result - where - T: Display, - S: Serializer, - { - serializer.collect_str(value) - } - - pub(super) fn deserialize<'de, T, D>(deserializer: D) -> Result - where - T: FromStr, - T::Err: Display, - D: Deserializer<'de>, - { - String::deserialize(deserializer)? 
- .parse() - .map_err(de::Error::custom) - } -} - -#[cfg(test)] -mod test { - use std::path::Path; - use std::str::FromStr; - - use anyhow::Context; - use indoc::indoc; - use insta::assert_snapshot; - - use uv_configuration::{ExtrasSpecification, PreviewMode}; - use uv_fs::Simplified; - use uv_normalize::PackageName; - - use crate::ProjectWorkspace; - use crate::RequirementsSpecification; - - fn from_source( - contents: &str, - path: impl AsRef, - extras: &ExtrasSpecification, - ) -> anyhow::Result { - let path = uv_fs::absolutize_path(path.as_ref())?; - let project_workspace = - ProjectWorkspace::dummy(path.as_ref(), &PackageName::from_str("foo").unwrap()); - let pyproject_toml = - toml::from_str(contents).context("Failed to parse: `pyproject.toml`")?; - RequirementsSpecification::parse_direct_pyproject_toml( - &pyproject_toml, - project_workspace.workspace(), - extras, - path.as_ref(), - PreviewMode::Enabled, - ) - .with_context(|| format!("Failed to parse: `{}`", path.user_display()))? - .context("Missing workspace") - } - - fn format_err(input: &str) -> String { - let err = from_source(input, "pyproject.toml", &ExtrasSpecification::None).unwrap_err(); - let mut causes = err.chain(); - let mut message = String::new(); - message.push_str(&format!("error: {}\n", causes.next().unwrap())); - for err in causes { - message.push_str(&format!(" Caused by: {err}\n")); - } - message - } - - #[test] - fn conflict_project_and_sources() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm @ git+https://github.com/tqdm/tqdm", - ] - - [tool.uv.sources] - tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: Failed to parse entry for: `tqdm` - Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources` - "###); - } - - #[test] - fn too_many_git_specs() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm", - ] - - [tool.uv.sources] - tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: Failed to parse entry for: `tqdm` - Caused by: Can only specify one of rev, tag, or branch - "###); - } - - #[test] - fn too_many_git_typo() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm", - ] - - [tool.uv.sources] - tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" } - "#}; - - // TODO(konsti): This should tell you the set of valid fields - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: TOML parse error at line 9, column 8 - | - 9 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - data did not match any variant of untagged enum Source - - "###); - } - - #[test] - fn you_cant_mix_those() { - let input = indoc! 
{r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm", - ] - - [tool.uv.sources] - tqdm = { path = "tqdm", index = "torch" } - "#}; - - // TODO(konsti): This should tell you the set of valid fields - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: TOML parse error at line 9, column 8 - | - 9 | tqdm = { path = "tqdm", index = "torch" } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - data did not match any variant of untagged enum Source - - "###); - } - - #[test] - fn missing_constraint() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm", - ] - "#}; - assert!(from_source(input, "pyproject.toml", &ExtrasSpecification::None).is_ok()); - } - - #[test] - fn invalid_syntax() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm ==4.66.0", - ] - - [tool.uv.sources] - tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: TOML parse error at line 9, column 16 - | - 9 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" } - | ^ - invalid string - expected `"`, `'` - - "###); - } - - #[test] - fn invalid_url() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm ==4.66.0", - ] - - [tool.uv.sources] - tqdm = { url = "§invalid#+#*Ä" } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: TOML parse error at line 9, column 8 - | - 9 | tqdm = { url = "§invalid#+#*Ä" } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - data did not match any variant of untagged enum Source - - "###); - } - - #[test] - fn workspace_and_url_spec() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm @ git+https://github.com/tqdm/tqdm", - ] - - [tool.uv.sources] - tqdm = { workspace = true } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: Failed to parse entry for: `tqdm` - Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources` - "###); - } - - #[test] - fn missing_workspace_package() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dependencies = [ - "tqdm ==4.66.0", - ] - - [tool.uv.sources] - tqdm = { workspace = true } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: Failed to parse entry for: `tqdm` - Caused by: Package is not included as workspace package in `tool.uv.workspace` - "###); - } - - #[test] - fn cant_be_dynamic() { - let input = indoc! {r#" - [project] - name = "foo" - version = "0.0.0" - dynamic = [ - "dependencies" - ] - - [tool.uv.sources] - tqdm = { workspace = true } - "#}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: pyproject.toml section is declared as dynamic, but must be static: `project.dependencies` - "###); - } - - #[test] - fn missing_project_section() { - let input = indoc! 
{" - [tool.uv.sources] - tqdm = { workspace = true } - "}; - - assert_snapshot!(format_err(input), @r###" - error: Failed to parse: `pyproject.toml` - Caused by: Must specify a `[project]` section alongside `[tool.uv.sources]` - "###); - } -} diff --git a/crates/uv-requirements/src/source_tree.rs b/crates/uv-requirements/src/source_tree.rs index 9d4fbe634..9e5266b08 100644 --- a/crates/uv-requirements/src/source_tree.rs +++ b/crates/uv-requirements/src/source_tree.rs @@ -7,17 +7,26 @@ use futures::stream::FuturesOrdered; use futures::TryStreamExt; use url::Url; -use distribution_types::{ - BuildableSource, DirectorySourceUrl, HashPolicy, Requirement, SourceUrl, VersionId, -}; +use distribution_types::{BuildableSource, DirectorySourceUrl, HashPolicy, SourceUrl, VersionId}; use pep508_rs::RequirementOrigin; -use pypi_types::VerbatimParsedUrl; +use pypi_types::Requirement; use uv_configuration::ExtrasSpecification; use uv_distribution::{DistributionDatabase, Reporter}; use uv_fs::Simplified; +use uv_normalize::{ExtraName, PackageName}; use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_types::{BuildContext, HashStrategy}; +#[derive(Debug, Clone)] +pub struct SourceTreeResolution { + /// The requirements sourced from the source trees. + pub requirements: Vec, + /// The names of the projects that were resolved. + pub project: PackageName, + /// The extras used when resolving the requirements. + pub extras: Vec, +} + /// A resolver for requirements specified via source trees. /// /// Used, e.g., to determine the input requirements when a user specifies a `pyproject.toml` @@ -63,26 +72,19 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { } /// Resolve the requirements from the provided source trees. - pub async fn resolve(self) -> Result> { - let requirements: Vec<_> = self + pub async fn resolve(self) -> Result> { + let resolutions: Vec<_> = self .source_trees .iter() .map(|source_tree| async { self.resolve_source_tree(source_tree).await }) .collect::>() .try_collect() .await?; - Ok(requirements - .into_iter() - .flatten() - .map(Requirement::from) - .collect()) + Ok(resolutions) } - /// Infer the package name for a given "unnamed" requirement. - async fn resolve_source_tree( - &self, - path: &Path, - ) -> Result>> { + /// Infer the dependencies for a directory dependency. + async fn resolve_source_tree(&self, path: &Path) -> Result { // Convert to a buildable source. let source_tree = fs_err::canonicalize(path).with_context(|| { format!( @@ -151,40 +153,59 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { } }; - // Extract the origin. let origin = RequirementOrigin::Project(path.to_path_buf(), metadata.name.clone()); + // Determine the extras to include when resolving the requirements. + let extras = match self.extras { + ExtrasSpecification::All => metadata.provides_extras.as_slice(), + ExtrasSpecification::None => &[], + ExtrasSpecification::Some(extras) => extras, + }; + // Determine the appropriate requirements to return based on the extras. This involves // evaluating the `extras` expression in any markers, but preserving the remaining marker // conditions. 
-        match self.extras {
-            ExtrasSpecification::None => Ok(metadata
-                .requires_dist
-                .into_iter()
-                .map(|requirement| requirement.with_origin(origin.clone()))
-                .collect()),
-            ExtrasSpecification::All => Ok(metadata
-                .requires_dist
-                .into_iter()
-                .map(|requirement| pep508_rs::Requirement {
-                    origin: Some(origin.clone()),
-                    marker: requirement
-                        .marker
-                        .and_then(|marker| marker.simplify_extras(&metadata.provides_extras)),
-                    ..requirement
-                })
-                .collect()),
-            ExtrasSpecification::Some(extras) => Ok(metadata
-                .requires_dist
-                .into_iter()
-                .map(|requirement| pep508_rs::Requirement {
-                    origin: Some(origin.clone()),
-                    marker: requirement
-                        .marker
-                        .and_then(|marker| marker.simplify_extras(extras)),
-                    ..requirement
-                })
-                .collect()),
+        let mut requirements: Vec<Requirement> = metadata
+            .requires_dist
+            .into_iter()
+            .map(|requirement| Requirement {
+                origin: Some(origin.clone()),
+                marker: requirement
+                    .marker
+                    .and_then(|marker| marker.simplify_extras(extras)),
+                ..requirement
+            })
+            .collect();
+
+        // Resolve any recursive extras.
+        loop {
+            // Find the first recursive requirement.
+            // TODO(charlie): Respect markers on recursive extras.
+            let Some(index) = requirements.iter().position(|requirement| {
+                requirement.name == metadata.name && requirement.marker.is_none()
+            }) else {
+                break;
+            };
+
+            // Remove the requirement that points to us.
+            let recursive = requirements.remove(index);
+
+            // Re-simplify the requirements.
+            for requirement in &mut requirements {
+                requirement.marker = requirement
+                    .marker
+                    .take()
+                    .and_then(|marker| marker.simplify_extras(&recursive.extras));
+            }
         }
+
+        let project = metadata.name;
+        let extras = metadata.provides_extras;
+
+        Ok(SourceTreeResolution {
+            requirements,
+            project,
+            extras,
+        })
     }
 }
diff --git a/crates/uv-requirements/src/specification.rs b/crates/uv-requirements/src/specification.rs
index 8e9286595..d156318e8 100644
--- a/crates/uv-requirements/src/specification.rs
+++ b/crates/uv-requirements/src/specification.rs
@@ -27,33 +27,29 @@
 //! * `setup.py` or `setup.cfg` instead of `pyproject.toml`: Directory is an entry in
 //!   `source_trees`.
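A condensed sketch of the routing this module doc describes; `Input` and `Spec` are hypothetical, pared-down stand-ins for `RequirementsSource` and `RequirementsSpecification`:

```rust
// Hypothetical routing sketch: each input kind lands in the field named by
// the module doc above.
use std::path::PathBuf;

enum Input {
    Package(String),          // `uv pip install flask`
    Editable(String),         // `uv pip install -e ./flask`
    PyprojectToml(PathBuf),   // `uv pip compile pyproject.toml`
    SourceTree(PathBuf),      // a directory with `setup.py`/`pyproject.toml`
}

#[derive(Default)]
struct Spec {
    requirements: Vec<String>,
    source_trees: Vec<PathBuf>,
}

fn route(input: Input, spec: &mut Spec) {
    match input {
        // Named and editable requirements land directly in `requirements`.
        Input::Package(req) | Input::Editable(req) => spec.requirements.push(req),
        // Project files are deferred: their metadata is resolved later, so
        // only the path is recorded as a source tree.
        Input::PyprojectToml(path) | Input::SourceTree(path) => spec.source_trees.push(path),
    }
}

fn main() {
    let mut spec = Spec::default();
    route(Input::Package("flask>=3".to_string()), &mut spec);
    route(Input::PyprojectToml(PathBuf::from("pyproject.toml")), &mut spec);
    assert_eq!(spec.requirements.len(), 1);
    assert_eq!(spec.source_trees.len(), 1);
}
```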
-use std::collections::VecDeque; use std::path::{Path, PathBuf}; use anyhow::{Context, Result}; -use path_absolutize::Absolutize; use rustc_hash::FxHashSet; -use same_file::is_same_file; -use tracing::{debug, instrument, trace}; +use tracing::instrument; use cache_key::CanonicalUrl; use distribution_types::{ - FlatIndexLocation, IndexUrl, Requirement, RequirementSource, UnresolvedRequirement, - UnresolvedRequirementSpecification, + FlatIndexLocation, IndexUrl, UnresolvedRequirement, UnresolvedRequirementSpecification, }; use pep508_rs::{UnnamedRequirement, UnnamedRequirementUrl}; +use pypi_types::Requirement; use pypi_types::VerbatimParsedUrl; use requirements_txt::{ EditableRequirement, FindLink, RequirementEntry, RequirementsTxt, RequirementsTxtRequirement, }; use uv_client::BaseClientBuilder; -use uv_configuration::{ExtrasSpecification, NoBinary, NoBuild, PreviewMode}; +use uv_configuration::{NoBinary, NoBuild}; +use uv_distribution::pyproject::PyProjectToml; use uv_fs::Simplified; use uv_normalize::{ExtraName, PackageName}; -use crate::pyproject::{Pep621Metadata, PyProjectToml}; -use crate::ProjectWorkspace; -use crate::{RequirementsSource, Workspace, WorkspaceError}; +use crate::RequirementsSource; #[derive(Debug, Default)] pub struct RequirementsSpecification { @@ -88,10 +84,7 @@ impl RequirementsSpecification { #[instrument(skip_all, level = tracing::Level::DEBUG, fields(source = % source))] pub async fn from_source( source: &RequirementsSource, - extras: &ExtrasSpecification, client_builder: &BaseClientBuilder<'_>, - workspace: Option<&Workspace>, - preview: PreviewMode, ) -> Result { Ok(match source { RequirementsSource::Package(name) => { @@ -108,9 +101,22 @@ impl RequirementsSpecification { } } RequirementsSource::Editable(name) => { - Self::from_editable_source(name, extras, workspace, preview).await? + let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?) + .with_context(|| format!("Failed to parse: `{name}`"))?; + Self { + requirements: vec![UnresolvedRequirementSpecification::from(requirement)], + ..Self::default() + } } RequirementsSource::RequirementsTxt(path) => { + if !(path == Path::new("-") + || path.starts_with("http://") + || path.starts_with("https://") + || path.is_file()) + { + return Err(anyhow::anyhow!("File not found: `{}`", path.user_display())); + } + let requirements_txt = RequirementsTxt::parse(path, std::env::current_dir()?, client_builder).await?; Self { @@ -151,317 +157,68 @@ impl RequirementsSpecification { } } RequirementsSource::PyprojectToml(path) => { - Self::from_pyproject_toml_source(path, extras, preview).await? 
+ let contents = match fs_err::tokio::read_to_string(&path).await { + Ok(contents) => contents, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + return Err(anyhow::anyhow!("File not found: `{}`", path.user_display())); + } + Err(err) => { + return Err(anyhow::anyhow!( + "Failed to read `{}`: {}", + path.user_display(), + err + )); + } + }; + let _ = toml::from_str::(&contents) + .with_context(|| format!("Failed to parse: `{}`", path.user_display()))?; + + Self { + source_trees: vec![path.clone()], + ..Self::default() + } + } + RequirementsSource::SetupPy(path) | RequirementsSource::SetupCfg(path) => { + if !path.is_file() { + return Err(anyhow::anyhow!("File not found: `{}`", path.user_display())); + } + + Self { + source_trees: vec![path.clone()], + ..Self::default() + } + } + RequirementsSource::SourceTree(path) => { + if !path.is_dir() { + return Err(anyhow::anyhow!( + "Directory not found: `{}`", + path.user_display() + )); + } + + Self { + project: None, + requirements: vec![UnresolvedRequirementSpecification { + requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement { + url: VerbatimParsedUrl::parse_absolute_path(path)?, + extras: vec![], + marker: None, + origin: None, + }), + hashes: vec![], + }], + ..Self::default() + } } - RequirementsSource::SetupPy(path) | RequirementsSource::SetupCfg(path) => Self { - source_trees: vec![path.clone()], - ..Self::default() - }, - RequirementsSource::SourceTree(path) => Self { - project: None, - requirements: vec![UnresolvedRequirementSpecification { - requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement { - url: VerbatimParsedUrl::parse_absolute_path(path)?, - extras: vec![], - marker: None, - origin: None, - }), - hashes: vec![], - }], - ..Self::default() - }, }) } - async fn from_editable_source( - name: &str, - extras: &ExtrasSpecification, - workspace: Option<&Workspace>, - preview: PreviewMode, - ) -> Result { - let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?) - .with_context(|| format!("Failed to parse: `{name}`"))?; - - // If we're not in preview mode, return the editable without searching for a workspace. - if preview.is_disabled() { - return Ok(Self { - requirements: vec![UnresolvedRequirementSpecification::from(requirement)], - ..Self::default() - }); - } - - // First try to find the project in the existing workspace (if any), then try workspace - // discovery. - let project_in_exiting_workspace = workspace.and_then(|workspace| { - // We use `is_same_file` instead of indexing by path to support different versions of - // the same path (e.g. symlinks). - workspace - .packages() - .values() - .find(|member| is_same_file(member.root(), &requirement.path).unwrap_or(false)) - .map(|member| (member.pyproject_toml(), workspace)) - }); - - let editable_spec = if let Some((pyproject_toml, workspace)) = project_in_exiting_workspace - { - debug!( - "Found project in workspace at: `{}`", - requirement.path.user_display() - ); - - Self::parse_direct_pyproject_toml( - pyproject_toml, - workspace, - extras, - requirement.path.as_ref(), - preview, - ) - .with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))? - } else if let Some(project_workspace) = - ProjectWorkspace::from_maybe_project_root(&requirement.path).await? 
- { - debug!( - "Found project at workspace root: `{}`", - requirement.path.user_display() - ); - - let pyproject_toml = project_workspace.current_project().pyproject_toml(); - let workspace = project_workspace.workspace(); - Self::parse_direct_pyproject_toml( - pyproject_toml, - workspace, - extras, - requirement.path.as_ref(), - preview, - ) - .with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))? - } else { - // No `pyproject.toml` or no static metadata also means no workspace support (at the - // moment). - debug!( - "pyproject.toml has dynamic metadata at: `{}`", - requirement.path.user_display() - ); - - return Ok(Self { - requirements: vec![UnresolvedRequirementSpecification::from(requirement)], - ..Self::default() - }); - }; - - if let Some(editable_spec) = editable_spec { - // We only collect the editables here to keep the count of root packages correct. - // TODO(konsti): Collect all workspace packages, even the non-editable ones. - Ok(Self { - requirements: editable_spec - .requirements - .into_iter() - .chain(std::iter::once(UnresolvedRequirementSpecification::from( - requirement, - ))) - .filter(|entry| entry.requirement.is_editable()) - .collect(), - ..Self::default() - }) - } else { - debug!( - "pyproject.toml has dynamic metadata at: `{}`", - requirement.path.user_display() - ); - Ok(Self { - requirements: vec![UnresolvedRequirementSpecification::from(requirement)], - ..Self::default() - }) - } - } - - async fn from_pyproject_toml_source( - path: &Path, - extras: &ExtrasSpecification, - preview: PreviewMode, - ) -> Result { - let dir = path.parent().context("pyproject.toml must have a parent")?; - // We have to handle three cases: - // * There is a workspace (possibly implicit) with static dependencies. - // * There are dynamic dependencies, we have to build and don't use workspace information if - // present. - // * There was an error during workspace discovery, such as an IO error or a - // `pyproject.toml` in the workspace not matching the (lenient) schema. - match ProjectWorkspace::from_project_root(dir).await { - Ok(project_workspace) => { - let static_pyproject_toml = Self::parse_direct_pyproject_toml( - project_workspace.current_project().pyproject_toml(), - project_workspace.workspace(), - extras, - path, - preview, - ) - .with_context(|| format!("Failed to parse: `{}`", path.user_display()))?; - - if let Some(static_pyproject_toml) = static_pyproject_toml { - Ok(static_pyproject_toml) - } else { - debug!("Dynamic pyproject.toml at: `{}`", path.user_display()); - Ok(Self { - source_trees: vec![path.to_path_buf()], - ..Self::default() - }) - } - } - Err(WorkspaceError::MissingProject(_)) => { - debug!( - "Missing `project` table from pyproject.toml at: `{}`", - path.user_display() - ); - Ok(Self { - source_trees: vec![path.to_path_buf()], - ..Self::default() - }) - } - Err(err) => Err(anyhow::Error::new(err)), - } - } - - /// Parse and lower a `pyproject.toml`, including all editable workspace dependencies. - /// - /// When dependency information is dynamic or invalid `project.dependencies` (e.g., Hatch's - /// relative path support), we return `None` and query the metadata with PEP 517 later. 
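The static-versus-dynamic gate that doc comment refers to reduces to inspecting `project.dynamic`. A minimal sketch with simplified field types, mirroring the `Pep621Metadata::try_from` logic deleted above:

```rust
// Minimal sketch of the "can we extract requirements statically?" gate.
struct Project {
    dynamic: Option<Vec<String>>,
    dependencies: Option<Vec<String>>,
}

/// Returns the statically-known dependencies, or `None` when the project
/// declares them as dynamic and a PEP 517 build is required instead.
fn static_dependencies(project: &Project) -> Option<&[String]> {
    if let Some(dynamic) = &project.dynamic {
        if dynamic.iter().any(|field| field == "dependencies") {
            return None; // Fall back to building the project for metadata.
        }
    }
    project.dependencies.as_deref()
}

fn main() {
    let dynamic = Project { dynamic: Some(vec!["dependencies".into()]), dependencies: None };
    assert!(static_dependencies(&dynamic).is_none());
    let declared = Project { dynamic: None, dependencies: Some(vec!["tqdm>=4".into()]) };
    assert_eq!(declared.dependencies.as_deref(), static_dependencies(&declared));
}
```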
- pub(crate) fn parse_direct_pyproject_toml( - pyproject: &PyProjectToml, - workspace: &Workspace, - extras: &ExtrasSpecification, - pyproject_path: &Path, - preview: PreviewMode, - ) -> Result> { - // We need use this path as base for the relative paths inside pyproject.toml, so - // we need the absolute path instead of a potentially relative path. E.g. with - // `foo = { path = "../foo" }`, we will join `../foo` onto this path. - let absolute_path = uv_fs::absolutize_path(pyproject_path)?; - let project_dir = absolute_path - .parent() - .context("`pyproject.toml` has no parent directory")?; - - let Some(project) = Pep621Metadata::try_from( - pyproject, - extras, - pyproject_path, - project_dir, - workspace, - preview, - )? - else { - debug!( - "Dynamic pyproject.toml at: `{}`", - pyproject_path.user_display() - ); - return Ok(None); - }; - - if preview.is_disabled() { - Ok(Some(Self { - project: Some(project.name), - requirements: project - .requirements - .into_iter() - .map(UnresolvedRequirementSpecification::from) - .collect(), - extras: project.used_extras, - ..Self::default() - })) - } else { - Ok(Some(Self::collect_transitive_editables( - workspace, extras, preview, project, - )?)) - } - } - - /// Perform a workspace dependency DAG traversal (breadth-first search) to collect all editables - /// eagerly. - /// - /// Consider a requirement on A in a workspace with workspace packages A, B, C where - /// A -> B and B -> C. - fn collect_transitive_editables( - workspace: &Workspace, - extras: &ExtrasSpecification, - preview: PreviewMode, - project: Pep621Metadata, - ) -> Result { - let mut seen = FxHashSet::from_iter([project.name.clone()]); - let mut queue = VecDeque::from([project.name.clone()]); - let mut requirements = Vec::new(); - let mut used_extras = FxHashSet::default(); - - while let Some(project_name) = queue.pop_front() { - let Some(current) = workspace.packages().get(&project_name) else { - continue; - }; - trace!("Processing metadata for workspace package {project_name}"); - - let project_root_absolute = current.root().absolutize_from(workspace.root())?; - let pyproject = current.pyproject_toml().clone(); - let project = Pep621Metadata::try_from( - &pyproject, - extras, - &project_root_absolute.join("pyproject.toml"), - project_root_absolute.as_ref(), - workspace, - preview, - ) - .with_context(|| { - format!( - "Invalid requirements in: `{}`", - current.root().join("pyproject.toml").user_display() - ) - })? - // TODO(konsti): We should support this by building and using the built PEP 517 metadata - .with_context(|| { - format!( - "Workspace member doesn't declare static metadata: `{}`", - current.root().user_display() - ) - })?; - - // Recurse into any editables. - for requirement in &project.requirements { - if matches!( - requirement.source, - RequirementSource::Path { editable: true, .. } - ) { - if seen.insert(requirement.name.clone()) { - queue.push_back(requirement.name.clone()); - } - } - } - - // Collect the requirements and extras. - used_extras.extend(project.used_extras); - requirements.extend(project.requirements); - } - - let spec = Self { - project: Some(project.name), - requirements: requirements - .into_iter() - .map(UnresolvedRequirementSpecification::from) - .collect(), - extras: used_extras, - ..Self::default() - }; - Ok(spec) - } - /// Read the combined requirements and constraints from a set of sources. - /// - /// If a [`Workspace`] is provided, it will be used as-is without re-discovering a workspace - /// from the filesystem. 
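The breadth-first workspace traversal deleted above (`collect_transitive_editables`, with its A -> B -> C example) is a standard BFS over the member graph; a sketch over a plain adjacency map rather than real workspace types:

```rust
use std::collections::{HashMap, HashSet, VecDeque};

// Sketch of the workspace DAG traversal: starting from one member, collect
// every package reachable through editable workspace dependencies.
fn collect_transitive(root: &str, workspace_deps: &HashMap<&str, Vec<&str>>) -> Vec<String> {
    let mut seen: HashSet<&str> = HashSet::from([root]);
    let mut queue: VecDeque<&str> = VecDeque::from([root]);
    let mut collected = Vec::new();

    while let Some(package) = queue.pop_front() {
        collected.push(package.to_string());
        for &dep in workspace_deps.get(package).into_iter().flatten() {
            // Each member is enqueued at most once, so cycles are harmless.
            if seen.insert(dep) {
                queue.push_back(dep);
            }
        }
    }
    collected
}

fn main() {
    // A -> B and B -> C: requesting A pulls in all three members.
    let deps = HashMap::from([("a", vec!["b"]), ("b", vec!["c"]), ("c", vec![])]);
    assert_eq!(collect_transitive("a", &deps), ["a", "b", "c"]);
}
```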
pub async fn from_sources( requirements: &[RequirementsSource], constraints: &[RequirementsSource], overrides: &[RequirementsSource], - workspace: Option<&Workspace>, - extras: &ExtrasSpecification, client_builder: &BaseClientBuilder<'_>, - preview: PreviewMode, ) -> Result { let mut spec = Self::default(); @@ -469,8 +226,7 @@ impl RequirementsSpecification { // A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading // a requirements file can also add constraints. for source in requirements { - let source = - Self::from_source(source, extras, client_builder, workspace, preview).await?; + let source = Self::from_source(source, client_builder).await?; spec.requirements.extend(source.requirements); spec.constraints.extend(source.constraints); spec.overrides.extend(source.overrides); @@ -502,8 +258,7 @@ impl RequirementsSpecification { // Read all constraints, treating both requirements _and_ constraints as constraints. // Overrides are ignored, as are the hashes, as they are not relevant for constraints. for source in constraints { - let source = - Self::from_source(source, extras, client_builder, workspace, preview).await?; + let source = Self::from_source(source, client_builder).await?; for entry in source.requirements { match entry.requirement { UnresolvedRequirement::Named(requirement) => { @@ -538,7 +293,7 @@ impl RequirementsSpecification { // Read all overrides, treating both requirements _and_ overrides as overrides. // Constraints are ignored. for source in overrides { - let source = Self::from_source(source, extras, client_builder, None, preview).await?; + let source = Self::from_source(source, client_builder).await?; spec.overrides.extend(source.requirements); spec.overrides.extend(source.overrides); @@ -566,17 +321,7 @@ impl RequirementsSpecification { pub async fn from_simple_sources( requirements: &[RequirementsSource], client_builder: &BaseClientBuilder<'_>, - preview: PreviewMode, ) -> Result { - Self::from_sources( - requirements, - &[], - &[], - None, - &ExtrasSpecification::None, - client_builder, - preview, - ) - .await + Self::from_sources(requirements, &[], &[], client_builder).await } } diff --git a/crates/uv-requirements/src/unnamed.rs b/crates/uv-requirements/src/unnamed.rs index bf728a35c..8e5aae721 100644 --- a/crates/uv-requirements/src/unnamed.rs +++ b/crates/uv-requirements/src/unnamed.rs @@ -12,10 +12,10 @@ use tracing::debug; use distribution_filename::{SourceDistFilename, WheelFilename}; use distribution_types::{ BuildableSource, DirectSourceUrl, DirectorySourceUrl, GitSourceUrl, PathSourceUrl, - RemoteSource, Requirement, SourceUrl, UnresolvedRequirement, - UnresolvedRequirementSpecification, VersionId, + RemoteSource, SourceUrl, UnresolvedRequirement, UnresolvedRequirementSpecification, VersionId, }; use pep508_rs::{UnnamedRequirement, VersionOrUrl}; +use pypi_types::Requirement; use pypi_types::{Metadata10, ParsedUrl, VerbatimParsedUrl}; use uv_distribution::{DistributionDatabase, Reporter}; use uv_normalize::PackageName; diff --git a/crates/uv-requirements/src/upgrade.rs b/crates/uv-requirements/src/upgrade.rs index 6ed477ff3..4cff3b0be 100644 --- a/crates/uv-requirements/src/upgrade.rs +++ b/crates/uv-requirements/src/upgrade.rs @@ -1,15 +1,14 @@ use std::path::Path; +use anstream::eprint; use anyhow::Result; -use anstream::eprint; use requirements_txt::RequirementsTxt; use uv_client::{BaseClientBuilder, Connectivity}; use uv_configuration::Upgrade; +use uv_distribution::ProjectWorkspace; use uv_resolver::{Lock, 
Preference, PreferenceError}; -use crate::ProjectWorkspace; - /// Load the preferred requirements from an existing `requirements.txt`, applying the upgrade strategy. pub async fn read_requirements_txt( output_file: Option<&Path>, diff --git a/crates/uv-resolver/src/manifest.rs b/crates/uv-resolver/src/manifest.rs index 943f72b90..fd1af0c38 100644 --- a/crates/uv-resolver/src/manifest.rs +++ b/crates/uv-resolver/src/manifest.rs @@ -1,7 +1,7 @@ use either::Either; -use distribution_types::Requirement; use pep508_rs::MarkerEnvironment; +use pypi_types::Requirement; use uv_configuration::{Constraints, Overrides}; use uv_normalize::PackageName; use uv_types::RequestedRequirements; diff --git a/crates/uv-resolver/src/prerelease_mode.rs b/crates/uv-resolver/src/prerelease_mode.rs index 20a406b28..db6cec125 100644 --- a/crates/uv-resolver/src/prerelease_mode.rs +++ b/crates/uv-resolver/src/prerelease_mode.rs @@ -1,4 +1,4 @@ -use distribution_types::RequirementSource; +use pypi_types::RequirementSource; use rustc_hash::FxHashSet; use pep508_rs::MarkerEnvironment; diff --git a/crates/uv-resolver/src/pubgrub/dependencies.rs b/crates/uv-resolver/src/pubgrub/dependencies.rs index 6f8539d3f..8c4b6f2e6 100644 --- a/crates/uv-resolver/src/pubgrub/dependencies.rs +++ b/crates/uv-resolver/src/pubgrub/dependencies.rs @@ -3,9 +3,10 @@ use pubgrub::range::Range; use rustc_hash::FxHashSet; use tracing::warn; -use distribution_types::{Requirement, RequirementSource, Verbatim}; +use distribution_types::Verbatim; use pep440_rs::Version; use pep508_rs::MarkerEnvironment; +use pypi_types::{Requirement, RequirementSource}; use uv_configuration::{Constraints, Overrides}; use uv_normalize::{ExtraName, PackageName}; diff --git a/crates/uv-resolver/src/resolution/graph.rs b/crates/uv-resolver/src/resolution/graph.rs index b61548d03..e8653cf54 100644 --- a/crates/uv-resolver/src/resolution/graph.rs +++ b/crates/uv-resolver/src/resolution/graph.rs @@ -1,17 +1,17 @@ use std::hash::BuildHasherDefault; -use rustc_hash::{FxHashMap, FxHashSet}; - -use distribution_types::{ - Dist, DistributionMetadata, Name, Requirement, ResolutionDiagnostic, VersionId, VersionOrUrlRef, -}; -use pep440_rs::{Version, VersionSpecifier}; -use pep508_rs::{MarkerEnvironment, MarkerTree}; use petgraph::{ graph::{Graph, NodeIndex}, Directed, }; -use pypi_types::{ParsedUrlError, Yanked}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use distribution_types::{ + Dist, DistributionMetadata, Name, ResolutionDiagnostic, VersionId, VersionOrUrlRef, +}; +use pep440_rs::{Version, VersionSpecifier}; +use pep508_rs::{MarkerEnvironment, MarkerTree}; +use pypi_types::{ParsedUrlError, Requirement, Yanked}; use uv_normalize::{ExtraName, PackageName}; use crate::preferences::Preferences; diff --git a/crates/uv-resolver/src/resolution/mod.rs b/crates/uv-resolver/src/resolution/mod.rs index 99f378916..f0c382eae 100644 --- a/crates/uv-resolver/src/resolution/mod.rs +++ b/crates/uv-resolver/src/resolution/mod.rs @@ -6,7 +6,8 @@ use itertools::Itertools; use distribution_types::{DistributionMetadata, Name, ResolvedDist, Verbatim, VersionOrUrlRef}; use pep508_rs::{split_scheme, MarkerTree, Scheme}; -use pypi_types::{HashDigest, Metadata23}; +use pypi_types::HashDigest; +use uv_distribution::Metadata; use uv_normalize::{ExtraName, PackageName}; pub use crate::resolution::display::{AnnotationStyle, DisplayResolutionGraph}; @@ -24,7 +25,7 @@ pub(crate) struct AnnotatedDist { pub(crate) extra: Option, pub(crate) marker: Option, pub(crate) hashes: Vec, - pub(crate) metadata: 
Metadata23, + pub(crate) metadata: Metadata, } impl Name for AnnotatedDist { diff --git a/crates/uv-resolver/src/resolver/locals.rs b/crates/uv-resolver/src/resolver/locals.rs index 414df49c5..4a2c06c46 100644 --- a/crates/uv-resolver/src/resolver/locals.rs +++ b/crates/uv-resolver/src/resolver/locals.rs @@ -4,9 +4,10 @@ use std::str::FromStr; use rustc_hash::FxHashMap; use distribution_filename::{SourceDistFilename, WheelFilename}; -use distribution_types::{RemoteSource, RequirementSource}; +use distribution_types::RemoteSource; use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifierBuildError}; use pep508_rs::MarkerEnvironment; +use pypi_types::RequirementSource; use uv_normalize::PackageName; use crate::{DependencyMode, Manifest}; @@ -203,10 +204,10 @@ mod tests { use anyhow::Result; use url::Url; - use distribution_types::RequirementSource; use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifiers}; use pep508_rs::VerbatimUrl; use pypi_types::ParsedUrl; + use pypi_types::RequirementSource; use crate::resolver::locals::{iter_locals, Locals}; diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index e0e31458a..63e5a2561 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -21,14 +21,13 @@ use tracing::{debug, enabled, instrument, trace, warn, Level}; use distribution_types::{ BuiltDist, Dist, DistributionMetadata, IncompatibleDist, IncompatibleSource, IncompatibleWheel, - InstalledDist, RemoteSource, Requirement, ResolvedDist, ResolvedDistRef, SourceDist, - VersionOrUrlRef, + InstalledDist, RemoteSource, ResolvedDist, ResolvedDistRef, SourceDist, VersionOrUrlRef, }; pub(crate) use locals::Locals; use pep440_rs::{Version, MIN_VERSION}; use pep508_rs::MarkerEnvironment; use platform_tags::Tags; -use pypi_types::Metadata23; +use pypi_types::{Metadata23, Requirement}; pub(crate) use urls::Urls; use uv_configuration::{Constraints, Overrides}; use uv_distribution::{ArchiveMetadata, DistributionDatabase}; @@ -1144,7 +1143,9 @@ impl ResolverState>(); + if !unused_extras.is_empty() { + unused_extras.sort_unstable(); + unused_extras.dedup(); + let s = if unused_extras.len() == 1 { "" } else { "s" }; + return Err(anyhow!( + "Requested extra{s} not found: {}", + unused_extras.iter().join(", ") + )); + } + } + + // Extend the requirements with the resolved source trees. 
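The unused-extras validation just above (and repeated in `operations::resolve` further down) is a plain difference between requested and provided extras; a standalone sketch of the check:

```rust
// Standalone sketch of the unused-extras check: requested extras that no
// resolved source tree provides produce an error up front.
fn check_extras(requested: &[&str], provided: &[&str]) -> Result<(), String> {
    let mut unused: Vec<&str> = requested
        .iter()
        .filter(|extra| !provided.contains(*extra))
        .copied()
        .collect();
    if unused.is_empty() {
        return Ok(());
    }
    unused.sort_unstable();
    unused.dedup();
    let s = if unused.len() == 1 { "" } else { "s" };
    Err(format!("Requested extra{s} not found: {}", unused.join(", ")))
}

fn main() {
    assert!(check_extras(&["test"], &["test", "docs"]).is_ok());
    assert_eq!(
        check_extras(&["gpu", "tpu"], &["test"]).unwrap_err(),
        "Requested extras not found: gpu, tpu"
    );
}
```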
requirements.extend( - SourceTreeResolver::new( - source_trees, - &extras, - &hasher, - &top_level_index, - DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads), - ) - .with_reporter(ResolverReporter::from(printer)) - .resolve() - .await?, + resolutions + .into_iter() + .flat_map(|resolution| resolution.requirements), ); } @@ -367,7 +400,7 @@ pub(crate) async fn pip_compile( overrides, &hasher, &top_level_index, - DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads), + DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview), ) .with_reporter(ResolverReporter::from(printer)) .resolve() @@ -425,7 +458,7 @@ pub(crate) async fn pip_compile( &overrides, &hasher, &top_level_index, - DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads), + DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview), ) .with_reporter(ResolverReporter::from(printer)) .resolve(marker_filter) @@ -466,7 +499,7 @@ pub(crate) async fn pip_compile( &hasher, &build_dispatch, EmptyInstalledPackages, - DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads), + DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview), )? .with_reporter(ResolverReporter::from(printer)); diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 2a76116c9..83a20d6c8 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -100,10 +100,8 @@ pub(crate) async fn pip_install( requirements, constraints, overrides, - None, extras, &client_builder, - preview, ) .await?; @@ -318,6 +316,7 @@ pub(crate) async fn pip_install( &no_build, &no_binary, concurrency, + preview, ) .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build()); @@ -358,6 +357,7 @@ pub(crate) async fn pip_install( concurrency, options, printer, + preview, ) .await { @@ -395,6 +395,7 @@ pub(crate) async fn pip_install( &no_build, &no_binary, concurrency, + preview, ) .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build()) }; @@ -419,6 +420,7 @@ pub(crate) async fn pip_install( &venv, dry_run, printer, + preview, ) .await?; diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 8a3ac0b3e..89b12f3e2 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -9,8 +9,7 @@ use owo_colors::OwoColorize; use tracing::debug; use distribution_types::{ - CachedDist, Diagnostic, InstalledDist, Requirement, ResolutionDiagnostic, - UnresolvedRequirementSpecification, + CachedDist, Diagnostic, InstalledDist, ResolutionDiagnostic, UnresolvedRequirementSpecification, }; use distribution_types::{ DistributionMetadata, IndexLocations, InstalledMetadata, LocalDist, Name, Resolution, @@ -18,6 +17,7 @@ use distribution_types::{ use install_wheel_rs::linker::LinkMode; use pep508_rs::MarkerEnvironment; use platform_tags::Tags; +use pypi_types::Requirement; use uv_cache::Cache; use uv_client::{BaseClientBuilder, RegistryClient}; use uv_configuration::{ @@ -32,7 +32,7 @@ use uv_interpreter::{Interpreter, PythonEnvironment}; use uv_normalize::PackageName; use uv_requirements::{ LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification, - SourceTreeResolver, Workspace, + SourceTreeResolver, }; use uv_resolver::{ DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference, @@ -42,8 +42,7 @@ use 
uv_types::{HashStrategy, InFlight, InstalledPackagesProvider}; use uv_warnings::warn_user; use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter}; -use crate::commands::DryRunEvent; -use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind}; +use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, DryRunEvent}; use crate::printer::Printer; /// Consolidate the requirements for an installation. @@ -51,10 +50,8 @@ pub(crate) async fn read_requirements( requirements: &[RequirementsSource], constraints: &[RequirementsSource], overrides: &[RequirementsSource], - workspace: Option<&Workspace>, extras: &ExtrasSpecification, client_builder: &BaseClientBuilder<'_>, - preview: PreviewMode, ) -> Result { // If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`), // return an error. @@ -66,39 +63,13 @@ pub(crate) async fn read_requirements( } // Read all requirements from the provided sources. - let spec = RequirementsSpecification::from_sources( + Ok(RequirementsSpecification::from_sources( requirements, constraints, overrides, - workspace, - extras, client_builder, - preview, ) - .await?; - - // If all the metadata could be statically resolved, validate that every extra was used. If we - // need to resolve metadata via PEP 517, we don't know which extras are used until much later. - if spec.source_trees.is_empty() { - if let ExtrasSpecification::Some(extras) = extras { - let mut unused_extras = extras - .iter() - .filter(|extra| !spec.extras.contains(extra)) - .collect::>(); - if !unused_extras.is_empty() { - unused_extras.sort_unstable(); - unused_extras.dedup(); - let s = if unused_extras.len() == 1 { "" } else { "s" }; - return Err(anyhow!( - "Requested extra{s} not found: {}", - unused_extras.iter().join(", ") - ) - .into()); - } - } - } - - Ok(spec) + .await?) } /// Resolve a set of requirements, similar to running `pip compile`. @@ -108,7 +79,7 @@ pub(crate) async fn resolve( constraints: Vec, overrides: Vec, source_trees: Vec, - project: Option, + mut project: Option, extras: &ExtrasSpecification, preferences: Vec, installed_packages: InstalledPackages, @@ -125,6 +96,7 @@ pub(crate) async fn resolve( concurrency: Concurrency, options: Options, printer: Printer, + preview: PreviewMode, ) -> Result { let start = std::time::Instant::now(); @@ -135,7 +107,7 @@ pub(crate) async fn resolve( requirements, hasher, index, - DistributionDatabase::new(client, build_dispatch, concurrency.downloads), + DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview), ) .with_reporter(ResolverReporter::from(printer)) .resolve() @@ -143,17 +115,53 @@ pub(crate) async fn resolve( // Resolve any source trees into requirements. if !source_trees.is_empty() { + let resolutions = SourceTreeResolver::new( + source_trees, + extras, + hasher, + index, + DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview), + ) + .with_reporter(ResolverReporter::from(printer)) + .resolve() + .await?; + + // If we resolved a single project, use it for the project name. + project = project.or_else(|| { + if let [resolution] = &resolutions[..] { + Some(resolution.project.clone()) + } else { + None + } + }); + + // If any of the extras were unused, surface a warning. 
@@ -108,7 +79,7 @@ pub(crate) async fn resolve(
     constraints: Vec<Requirement>,
     overrides: Vec<UnresolvedRequirementSpecification>,
     source_trees: Vec<PathBuf>,
-    project: Option<PackageName>,
+    mut project: Option<PackageName>,
     extras: &ExtrasSpecification,
     preferences: Vec<Preference>,
     installed_packages: InstalledPackages,
@@ -125,6 +96,7 @@
     concurrency: Concurrency,
     options: Options,
     printer: Printer,
+    preview: PreviewMode,
 ) -> Result<ResolutionGraph, Error> {
     let start = std::time::Instant::now();
@@ -135,7 +107,7 @@
             requirements,
             hasher,
             index,
-            DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
         )
         .with_reporter(ResolverReporter::from(printer))
         .resolve()
@@ -143,17 +115,53 @@
     // Resolve any source trees into requirements.
     if !source_trees.is_empty() {
+        let resolutions = SourceTreeResolver::new(
+            source_trees,
+            extras,
+            hasher,
+            index,
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
+        )
+        .with_reporter(ResolverReporter::from(printer))
+        .resolve()
+        .await?;
+
+        // If we resolved a single project, use it for the project name.
+        project = project.or_else(|| {
+            if let [resolution] = &resolutions[..] {
+                Some(resolution.project.clone())
+            } else {
+                None
+            }
+        });
+
+        // If any of the extras were unused, surface a warning.
+        if let ExtrasSpecification::Some(extras) = extras {
+            let mut unused_extras = extras
+                .iter()
+                .filter(|extra| {
+                    !resolutions
+                        .iter()
+                        .any(|resolution| resolution.extras.contains(extra))
+                })
+                .collect::<Vec<_>>();
+            if !unused_extras.is_empty() {
+                unused_extras.sort_unstable();
+                unused_extras.dedup();
+                let s = if unused_extras.len() == 1 { "" } else { "s" };
+                return Err(anyhow!(
+                    "Requested extra{s} not found: {}",
+                    unused_extras.iter().join(", ")
+                )
+                .into());
+            }
+        }
+
+        // Extend the requirements with the resolved source trees.
         requirements.extend(
-            SourceTreeResolver::new(
-                source_trees,
-                extras,
-                hasher,
-                index,
-                DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
-            )
-            .with_reporter(ResolverReporter::from(printer))
-            .resolve()
-            .await?,
+            resolutions
+                .into_iter()
+                .flat_map(|resolution| resolution.requirements),
         );
     }
@@ -165,7 +173,7 @@
             overrides,
             hasher,
             index,
-            DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
         )
         .with_reporter(ResolverReporter::from(printer))
         .resolve()
@@ -185,7 +193,7 @@
             &overrides,
             hasher,
             index,
-            DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
         )
         .with_reporter(ResolverReporter::from(printer))
         .resolve(Some(markers))
@@ -229,7 +237,7 @@
             hasher,
             build_dispatch,
             installed_packages,
-            DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
         )?
         .with_reporter(reporter);
@@ -287,6 +295,7 @@ pub(crate) async fn install(
     venv: &PythonEnvironment,
     dry_run: bool,
     printer: Printer,
+    preview: PreviewMode,
 ) -> Result<(), Error> {
     let start = std::time::Instant::now();
@@ -362,7 +371,7 @@
             cache,
             tags,
             hasher,
-            DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
+            DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
         )
         .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
 
diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs
index 052b64d71..b902dabae 100644
--- a/crates/uv/src/commands/pip/sync.rs
+++ b/crates/uv/src/commands/pip/sync.rs
@@ -97,10 +97,8 @@ pub(crate) async fn pip_sync(
         requirements,
         constraints,
         overrides,
-        None,
         &extras,
         &client_builder,
-        preview,
     )
     .await?;
@@ -269,6 +267,7 @@ pub(crate) async fn pip_sync(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     )
     .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
@@ -308,6 +307,7 @@ pub(crate) async fn pip_sync(
         concurrency,
         options,
         printer,
+        preview,
     )
     .await
     {
@@ -344,6 +344,7 @@ pub(crate) async fn pip_sync(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     )
     .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build())
     };
@@ -368,6 +369,7 @@ pub(crate) async fn pip_sync(
         &venv,
         dry_run,
         printer,
+        preview,
     )
     .await?;
 
diff --git a/crates/uv/src/commands/pip/uninstall.rs b/crates/uv/src/commands/pip/uninstall.rs
index 87905435e..c7453fccb 100644
--- a/crates/uv/src/commands/pip/uninstall.rs
+++ b/crates/uv/src/commands/pip/uninstall.rs
@@ -5,8 +5,9 @@ use itertools::{Either, Itertools};
 use owo_colors::OwoColorize;
 use tracing::debug;
 
-use distribution_types::{InstalledMetadata, Name, Requirement, UnresolvedRequirement};
+use distribution_types::{InstalledMetadata, Name, UnresolvedRequirement};
 use pep508_rs::UnnamedRequirement;
+use pypi_types::Requirement;
 use pypi_types::VerbatimParsedUrl;
 use uv_cache::Cache;
 use uv_client::{BaseClientBuilder, Connectivity};
@@ -40,8 +41,7 @@ pub(crate) async fn pip_uninstall(
         .keyring(keyring_provider);
 
     // Read all requirements from the provided sources.
-    let spec =
-        RequirementsSpecification::from_simple_sources(sources, &client_builder, preview).await?;
+    let spec = RequirementsSpecification::from_simple_sources(sources, &client_builder).await?;
 
     // Detect the current Python interpreter.
     let system = if system {
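One mechanical change repeats across all of these files: `PreviewMode` travels by value through `BuildDispatch`, `DistributionDatabase`, and the `do_lock`/`do_sync` entry points instead of being read from ambient state, so the compiler forces every call site to pick a mode. A stand-in sketch of the shape (the real flag is `uv_configuration::PreviewMode`; the types below are illustrative, not uv's API):

// Stand-in for `uv_configuration::PreviewMode`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PreviewMode {
    Disabled,
    Enabled,
}

// Stand-in for a component like `DistributionDatabase`: the flag is a
// constructor argument, so no caller can forget to thread it through.
struct Database {
    preview: PreviewMode,
}

impl Database {
    fn new(preview: PreviewMode) -> Self {
        Self { preview }
    }

    fn lookup(&self, name: &str) {
        // Preview-only code paths (e.g. `tool.uv.sources`) gate on the flag.
        if self.preview == PreviewMode::Enabled {
            println!("resolving {name} with preview features enabled");
        } else {
            println!("resolving {name}");
        }
    }
}

fn main() {
    Database::new(PreviewMode::Disabled).lookup("anyio");
    Database::new(PreviewMode::Enabled).lookup("anyio");
}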
diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs
index 0e3430162..a0137c660 100644
--- a/crates/uv/src/commands/project/lock.rs
+++ b/crates/uv/src/commands/project/lock.rs
@@ -10,9 +10,9 @@ use uv_configuration::{
     SetupPyStrategy, Upgrade,
 };
 use uv_dispatch::BuildDispatch;
+use uv_distribution::ProjectWorkspace;
 use uv_interpreter::PythonEnvironment;
 use uv_requirements::upgrade::read_lockfile;
-use uv_requirements::ProjectWorkspace;
 use uv_resolver::{ExcludeNewer, FlatIndex, InMemoryIndex, Lock, OptionsBuilder};
 use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
 use uv_warnings::warn_user;
@@ -41,7 +41,17 @@ pub(crate) async fn lock(
     let venv = project::init_environment(&project, preview, cache, printer)?;
 
     // Perform the lock operation.
-    match do_lock(&project, &venv, upgrade, exclude_newer, cache, printer).await {
+    match do_lock(
+        &project,
+        &venv,
+        upgrade,
+        exclude_newer,
+        preview,
+        cache,
+        printer,
+    )
+    .await
+    {
         Ok(_) => Ok(ExitStatus::Success),
         Err(ProjectError::Operation(pip::operations::Error::Resolve(
             uv_resolver::ResolveError::NoSolution(err),
@@ -61,6 +71,7 @@ pub(super) async fn do_lock(
     venv: &PythonEnvironment,
     upgrade: Upgrade,
     exclude_newer: Option<ExcludeNewer>,
+    preview: PreviewMode,
     cache: &Cache,
     printer: Printer,
 ) -> Result<Lock, ProjectError> {
@@ -124,6 +135,7 @@ pub(super) async fn do_lock(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     );
 
     // Resolve the requirements.
@@ -149,6 +161,7 @@ pub(super) async fn do_lock(
         concurrency,
         options,
         printer,
+        preview,
     )
     .await?;
 
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index 3e3b71847..ca85ad15f 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -14,10 +14,11 @@ use uv_configuration::{
     SetupPyStrategy, Upgrade,
 };
 use uv_dispatch::BuildDispatch;
+use uv_distribution::ProjectWorkspace;
 use uv_fs::Simplified;
 use uv_installer::{SatisfiesResult, SitePackages};
 use uv_interpreter::{find_default_interpreter, PythonEnvironment};
-use uv_requirements::{ProjectWorkspace, RequirementsSource, RequirementsSpecification, Workspace};
+use uv_requirements::{RequirementsSource, RequirementsSpecification};
 use uv_resolver::{FlatIndex, InMemoryIndex, Options};
 use uv_types::{BuildIsolation, HashStrategy, InFlight};
@@ -107,11 +108,10 @@ pub(crate) fn init_environment(
 pub(crate) async fn update_environment(
     venv: PythonEnvironment,
     requirements: &[RequirementsSource],
-    workspace: Option<&Workspace>,
-    preview: PreviewMode,
     connectivity: Connectivity,
     cache: &Cache,
     printer: Printer,
+    preview: PreviewMode,
 ) -> Result<PythonEnvironment> {
     // TODO(zanieb): Support client configuration
     let client_builder = BaseClientBuilder::default().connectivity(connectivity);
@@ -119,16 +119,8 @@ pub(crate) async fn update_environment(
     // Read all requirements from the provided sources.
     // TODO(zanieb): Consider allowing constraints and extras
     // TODO(zanieb): Allow specifying extras somehow
-    let spec = RequirementsSpecification::from_sources(
-        requirements,
-        &[],
-        &[],
-        workspace,
-        &ExtrasSpecification::None,
-        &client_builder,
-        preview,
-    )
-    .await?;
+    let spec =
+        RequirementsSpecification::from_sources(requirements, &[], &[], &client_builder).await?;
 
     // Check if the current environment satisfies the requirements
     let site_packages = SitePackages::from_executable(&venv)?;
@@ -204,6 +196,7 @@ pub(crate) async fn update_environment(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     );
 
     // Resolve the requirements.
@@ -229,6 +222,7 @@ pub(crate) async fn update_environment(
         concurrency,
         options,
         printer,
+        preview,
     )
     .await
     {
@@ -259,6 +253,7 @@ pub(crate) async fn update_environment(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     )
     };
@@ -282,6 +277,7 @@ pub(crate) async fn update_environment(
         &venv,
         dry_run,
         printer,
+        preview,
     )
     .await?;
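The `update_environment` hunks above leave the early exit in place: installed packages are inspected first, and resolution only runs when they fail to satisfy the request. A toy sketch of that gate, with plain strings standing in for uv's requirement types and its `SatisfiesResult`:

// Illustrative only: real satisfaction checking compares versions,
// markers, and URLs, not exact strings.
#[derive(Debug, PartialEq)]
enum Satisfies {
    Fresh,
    Stale,
}

fn check(installed: &[&str], requested: &[&str]) -> Satisfies {
    if requested.iter().all(|r| installed.contains(r)) {
        Satisfies::Fresh // skip resolution entirely
    } else {
        Satisfies::Stale // fall through to the resolver
    }
}

fn main() {
    assert_eq!(check(&["anyio==4.3.0"], &["anyio==4.3.0"]), Satisfies::Fresh);
    assert_eq!(check(&[], &["anyio==4.3.0"]), Satisfies::Stale);
}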
diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs
index 5cdddc0fc..2b981b3f5 100644
--- a/crates/uv/src/commands/project/run.rs
+++ b/crates/uv/src/commands/project/run.rs
@@ -10,8 +10,9 @@ use tracing::debug;
 
 use uv_cache::Cache;
 use uv_client::Connectivity;
 use uv_configuration::{ExtrasSpecification, PreviewMode, Upgrade};
+use uv_distribution::ProjectWorkspace;
 use uv_interpreter::{PythonEnvironment, SystemPython};
-use uv_requirements::{ProjectWorkspace, RequirementsSource};
+use uv_requirements::RequirementsSource;
 use uv_resolver::ExcludeNewer;
 use uv_warnings::warn_user;
@@ -48,9 +49,17 @@ pub(crate) async fn run(
         let venv = project::init_environment(&project, preview, cache, printer)?;
 
         // Lock and sync the environment.
-        let lock =
-            project::lock::do_lock(&project, &venv, upgrade, exclude_newer, cache, printer).await?;
-        project::sync::do_sync(&project, &venv, &lock, extras, cache, printer).await?;
+        let lock = project::lock::do_lock(
+            &project,
+            &venv,
+            upgrade,
+            exclude_newer,
+            preview,
+            cache,
+            printer,
+        )
+        .await?;
+        project::sync::do_sync(&project, &venv, &lock, extras, preview, cache, printer).await?;
 
         Some(venv)
     };
@@ -92,16 +101,8 @@ pub(crate) async fn run(
 
         // Install the ephemeral requirements.
         Some(
-            project::update_environment(
-                venv,
-                &requirements,
-                None,
-                preview,
-                connectivity,
-                cache,
-                printer,
-            )
-            .await?,
+            project::update_environment(venv, &requirements, connectivity, cache, printer, preview)
+                .await?,
         )
     };
 
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 1df96fe52..0e1f92de1 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -9,9 +9,9 @@ use uv_configuration::{
     SetupPyStrategy,
 };
 use uv_dispatch::BuildDispatch;
+use uv_distribution::ProjectWorkspace;
 use uv_installer::SitePackages;
 use uv_interpreter::PythonEnvironment;
-use uv_requirements::ProjectWorkspace;
 use uv_resolver::{FlatIndex, InMemoryIndex, Lock};
 use uv_types::{BuildIsolation, HashStrategy, InFlight};
 use uv_warnings::warn_user;
@@ -47,7 +47,7 @@ pub(crate) async fn sync(
     };
 
     // Perform the sync operation.
-    do_sync(&project, &venv, &lock, extras, cache, printer).await?;
+    do_sync(&project, &venv, &lock, extras, preview, cache, printer).await?;
 
     Ok(ExitStatus::Success)
 }
@@ -58,6 +58,7 @@ pub(super) async fn do_sync(
     venv: &PythonEnvironment,
     lock: &Lock,
     extras: ExtrasSpecification,
+    preview: PreviewMode,
     cache: &Cache,
     printer: Printer,
 ) -> Result<(), ProjectError> {
@@ -112,6 +113,7 @@ pub(super) async fn do_sync(
         &no_build,
         &no_binary,
         concurrency,
+        preview,
     );
 
     let site_packages = SitePackages::from_executable(venv)?;
@@ -136,6 +138,7 @@ pub(super) async fn do_sync(
         venv,
         dry_run,
         printer,
+        preview,
     )
     .await?;
diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs
index 68d935873..b57ffb82d 100644
--- a/crates/uv/src/commands/tool/run.rs
+++ b/crates/uv/src/commands/tool/run.rs
@@ -71,18 +71,8 @@ pub(crate) async fn run(
     )?;
 
     // Install the ephemeral requirements.
-    let ephemeral_env = Some(
-        update_environment(
-            venv,
-            &requirements,
-            None,
-            preview,
-            connectivity,
-            cache,
-            printer,
-        )
-        .await?,
-    );
+    let ephemeral_env =
+        Some(update_environment(venv, &requirements, connectivity, cache, printer, preview).await?);
 
     // TODO(zanieb): Determine the command via the package entry points
     let command = target;
 
diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs
index 9fc1da5d6..61700664d 100644
--- a/crates/uv/src/commands/venv.rs
+++ b/crates/uv/src/commands/venv.rs
@@ -9,8 +9,9 @@ use miette::{Diagnostic, IntoDiagnostic};
 use owo_colors::OwoColorize;
 use thiserror::Error;
 
-use distribution_types::{IndexLocations, Requirement};
+use distribution_types::IndexLocations;
 use install_wheel_rs::linker::LinkMode;
+use pypi_types::Requirement;
 use uv_auth::store_credentials_from_url;
 use uv_cache::Cache;
 use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
@@ -221,6 +222,7 @@ async fn venv_impl(
         &NoBuild::All,
         &NoBinary::None,
         concurrency,
+        preview,
     )
     .with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
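Every test hunk below pins the same user-facing change: a missing requirements file now yields the single-line `File not found:` message instead of a generic read failure chained with the OS error. One plausible shape for that mapping, sketched with stand-in names (not uv's actual error API):

use std::io;
use std::path::Path;

#[derive(Debug)]
enum ReadError {
    NotFound(String),
    Io(io::Error),
}

// Hypothetical helper: special-case `NotFound` into a one-line message
// rather than reporting "failed to read" plus an os-error cause chain.
fn read_requirements_file(path: &Path) -> Result<String, ReadError> {
    std::fs::read_to_string(path).map_err(|err| {
        if err.kind() == io::ErrorKind::NotFound {
            ReadError::NotFound(format!("File not found: `{}`", path.display()))
        } else {
            ReadError::Io(err)
        }
    })
}

fn main() {
    match read_requirements_file(Path::new("requirements.in")) {
        Err(ReadError::NotFound(msg)) => eprintln!("error: {msg}"),
        Err(ReadError::Io(err)) => eprintln!("error: {err}"),
        Ok(_) => {}
    }
}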
diff --git a/crates/uv/tests/pip_compile.rs b/crates/uv/tests/pip_compile.rs
index 46bfde9c8..1d2a455da 100644
--- a/crates/uv/tests/pip_compile.rs
+++ b/crates/uv/tests/pip_compile.rs
@@ -116,8 +116,7 @@ fn missing_requirements_in() {
     ----- stdout -----
 
     ----- stderr -----
-    error: failed to read from file `requirements.in`
-      Caused by: No such file or directory (os error 2)
+    error: File not found: `requirements.in`
     "###
     );
@@ -177,6 +176,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = [
   "anyio==3.7.0",
 ]
@@ -216,6 +216,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = [
   "anyio==3.7.0",
 ]
@@ -405,6 +406,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = []
 optional-dependencies.foo = [
   "anyio==3.7.0",
@@ -447,6 +449,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = []
 optional-dependencies."FrIeNdLy-._.-bArD" = [
   "anyio==3.7.0",
@@ -489,6 +492,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = []
 optional-dependencies.foo = [
   "anyio==3.7.0",
@@ -789,6 +793,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = []
 optional-dependencies.foo = [
   "anyio==3.7.0",
@@ -2165,6 +2170,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = ["anyio==3.7.0"]
 optional-dependencies.foo = [
   "iniconfig==1.1.1",
@@ -2220,6 +2226,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "project"
+version = "0.1.0"
 dependencies = ["anyio==3.7.0"]
 optional-dependencies.foo = [
   "iniconfig==1.1.1",
@@ -2309,6 +2316,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "my-project"
+version = "0.1.0"
 dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
 "#,
     )?;
@@ -2340,6 +2348,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "my-project"
+version = "0.1.0"
 dependencies = ["anyio==300.1.4"]
 "#,
     )?;
@@ -6228,27 +6237,27 @@ fn pre_release_constraint() -> Result<()> {
     Ok(())
 }
 
-/// Resolve from a `pyproject.toml` file with a recursive extra.
+/// Resolve from a `pyproject.toml` file with a mutually recursive extra.
 #[test]
-fn compile_pyproject_toml_recursive_extra() -> Result<()> {
+fn compile_pyproject_toml_mutually_recursive_extra() -> Result<()> {
     let context = TestContext::new("3.12");
     let pyproject_toml = context.temp_dir.child("pyproject.toml");
     pyproject_toml.write_str(
         r#"
 [project]
-name = "my-project"
+name = "project"
 version = "0.0.1"
 dependencies = [
-  "tomli>=2,<3",
+  "anyio"
 ]
 
 [project.optional-dependencies]
 test = [
-  "pep517>=0.13,<0.14",
-  "my-project[dev]"
+  "iniconfig",
+  "project[dev]"
 ]
 dev = [
-  "my-project[test]",
+  "project[test]",
 ]
 "#,
     )?;
@@ -6262,13 +6271,67 @@ dev = [
     ----- stdout -----
     # This file was autogenerated by uv via the following command:
     #    uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
-    pep517==0.13.1
-        # via my-project (pyproject.toml)
-    tomli==2.0.1
-        # via my-project (pyproject.toml)
+    anyio==4.3.0
+        # via project (pyproject.toml)
+    idna==3.6
+        # via anyio
+    iniconfig==2.0.0
+        # via project (pyproject.toml)
+    sniffio==1.3.1
+        # via anyio
 
     ----- stderr -----
-    Resolved 2 packages in [TIME]
+    Resolved 4 packages in [TIME]
+    "###
+    );
+
+    Ok(())
+}
+
+/// Resolve from a `pyproject.toml` file with a recursive extra.
+#[test]
+fn compile_pyproject_toml_recursive_extra() -> Result<()> {
+    let context = TestContext::new("3.12");
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+[project]
+name = "project"
+version = "0.0.1"
+dependencies = [
+  "anyio"
+]
+
+[project.optional-dependencies]
+test = [
+  "iniconfig",
+]
+dev = [
+  "project[test]",
+]
+"#,
+    )?;
+
+    uv_snapshot!(context.compile()
+            .arg("pyproject.toml")
+            .arg("--extra")
+            .arg("dev"), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    # This file was autogenerated by uv via the following command:
+    #    uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
+    anyio==4.3.0
+        # via project (pyproject.toml)
+    idna==3.6
+        # via anyio
+    iniconfig==2.0.0
+        # via project (pyproject.toml)
+    sniffio==1.3.1
+        # via anyio
+
+    ----- stderr -----
+    Resolved 4 packages in [TIME]
     "###
     );
 
     Ok(())
 }
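Both tests above depend on extra expansion terminating even though the extras refer back to their own package (`dev` -> `test` -> `dev`). Any such expansion has to remember which extras it has already visited; a toy sketch of just that cycle-breaking (not uv's resolver):

use std::collections::{BTreeMap, BTreeSet};

// `extras` maps an extra name to its dependencies, where a dependency of
// the form `project[<extra>]` refers back to another extra of the same
// package. Illustrative only.
fn expand(
    extras: &BTreeMap<&str, Vec<&str>>,
    start: &str,
    seen: &mut BTreeSet<String>,
    out: &mut BTreeSet<String>,
) {
    if !seen.insert(start.to_string()) {
        return; // already expanded: this is what breaks the dev <-> test cycle
    }
    for dep in extras.get(start).into_iter().flatten() {
        if let Some(extra) = dep.strip_prefix("project[").and_then(|s| s.strip_suffix(']')) {
            expand(extras, extra, seen, out);
        } else {
            out.insert((*dep).to_string());
        }
    }
}

fn main() {
    let mut extras = BTreeMap::new();
    extras.insert("test", vec!["iniconfig", "project[dev]"]);
    extras.insert("dev", vec!["project[test]"]);
    let (mut seen, mut out) = (BTreeSet::new(), BTreeSet::new());
    expand(&extras, "dev", &mut seen, &mut out);
    assert_eq!(out.into_iter().collect::<Vec<_>>(), vec!["iniconfig".to_string()]);
}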
diff --git a/crates/uv/tests/pip_install.rs b/crates/uv/tests/pip_install.rs
index 9d5179952..3b4e9b8ee 100644
--- a/crates/uv/tests/pip_install.rs
+++ b/crates/uv/tests/pip_install.rs
@@ -81,8 +81,7 @@ fn missing_requirements_txt() {
     ----- stdout -----
 
     ----- stderr -----
-    error: failed to read from file `requirements.txt`
-      Caused by: No such file or directory (os error 2)
+    error: File not found: `requirements.txt`
     "###
     );
@@ -124,8 +123,7 @@ fn missing_pyproject_toml() {
     ----- stdout -----
 
     ----- stderr -----
-    error: failed to read from file `pyproject.toml`
-      Caused by: No such file or directory (os error 2)
+    error: File not found: `pyproject.toml`
     "###
     );
 }
@@ -184,41 +182,6 @@ fn invalid_pyproject_toml_schema() -> Result<()> {
     Ok(())
 }
 
-/// For user controlled pyproject.toml files, we enforce PEP 621.
-#[test]
-fn invalid_pyproject_toml_requirement_direct() -> Result<()> {
-    let context = TestContext::new("3.12");
-    let pyproject_toml = context.temp_dir.child("pyproject.toml");
-    pyproject_toml.write_str(
-        r#"[project]
-name = "project"
-dependencies = ["flask==1.0.x"]
-"#,
-    )?;
-
-    let filters = [("exit status", "exit code")]
-        .into_iter()
-        .chain(context.filters())
-        .collect::<Vec<_>>();
-
-    uv_snapshot!(filters, context.install()
-        .arg("-r")
-        .arg("pyproject.toml"), @r###"
-    success: false
-    exit_code: 2
-    ----- stdout -----
-
-    ----- stderr -----
-    error: Failed to parse: `pyproject.toml`
-      Caused by: after parsing '1.0', found '.x', which is not part of a valid version
-    flask==1.0.x
-         ^^^^^^^
-    "###
-    );
-
-    Ok(())
-}
-
 /// For indirect, non-user controlled pyproject.toml, we don't enforce correctness.
 ///
 /// If we fail to extract the PEP 621 metadata, we fall back to treating it as a source
@@ -4980,7 +4943,8 @@ fn tool_uv_sources() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    Audited 6 packages in [TIME]
+    Resolved 9 packages in [TIME]
+    Audited 9 packages in [TIME]
     "###
     );
     Ok(())
@@ -5013,9 +4977,7 @@ fn tool_uv_sources_is_in_preview() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: Failed to parse: `pyproject.toml`
-      Caused by: Failed to parse entry for: `tqdm`
-      Caused by: `tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it
+    error: `tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it
     "###
     );
 
diff --git a/crates/uv/tests/pip_sync.rs b/crates/uv/tests/pip_sync.rs
index 43bb1ea94..a4c0fb6f2 100644
--- a/crates/uv/tests/pip_sync.rs
+++ b/crates/uv/tests/pip_sync.rs
@@ -95,8 +95,7 @@ fn missing_requirements_txt() {
     ----- stdout -----
 
     ----- stderr -----
-    error: failed to read from file `requirements.txt`
-      Caused by: No such file or directory (os error 2)
+    error: File not found: `requirements.txt`
     "###);
 
     requirements_txt.assert(predicates::path::missing());
 
diff --git a/crates/uv/tests/pip_uninstall.rs b/crates/uv/tests/pip_uninstall.rs
index 59f267f30..23762dd3d 100644
--- a/crates/uv/tests/pip_uninstall.rs
+++ b/crates/uv/tests/pip_uninstall.rs
@@ -116,8 +116,7 @@ fn missing_requirements_txt() -> Result<()> {
     ----- stdout -----
 
     ----- stderr -----
-    error: failed to read from file `requirements.txt`
-      Caused by: No such file or directory (os error 2)
+    error: File not found: `requirements.txt`
     "###
     );
 
diff --git a/crates/uv/tests/workspace.rs b/crates/uv/tests/workspace.rs
index b35c90f94..bb2e44d47 100644
--- a/crates/uv/tests/workspace.rs
+++ b/crates/uv/tests/workspace.rs
@@ -218,7 +218,7 @@ fn test_albatross_root_workspace() {
     ----- stdout -----
 
     ----- stderr -----
-    Audited 3 packages in [TIME]
+    Audited 1 package in [TIME]
     "###
     );
@@ -257,7 +257,7 @@ fn test_albatross_root_workspace_bird_feeder() {
     ----- stdout -----
 
     ----- stderr -----
-    Audited 2 packages in [TIME]
+    Audited 1 package in [TIME]
     "###
     );
@@ -296,7 +296,7 @@ fn test_albatross_root_workspace_albatross() {
     ----- stdout -----
 
     ----- stderr -----
-    Audited 2 packages in [TIME]
+    Audited 1 package in [TIME]
     "###
     );
@@ -335,7 +335,7 @@ fn test_albatross_virtual_workspace() {
     ----- stdout -----
 
     ----- stderr -----
-    Audited 2 packages in [TIME]
+    Audited 1 package in [TIME]
     "###
     );
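The updated snapshots flip between `Audited 1 package` and `Audited 3 packages`, using the same singular/plural suffix trick the diff introduces for `Requested extra{s}`. As a tiny sketch:

fn audited(count: usize) -> String {
    // Pick the suffix from the count, exactly like `let s = if ... { "" } else { "s" }`.
    let s = if count == 1 { "" } else { "s" };
    format!("Audited {count} package{s} in [TIME]")
}

fn main() {
    assert_eq!(audited(1), "Audited 1 package in [TIME]");
    assert_eq!(audited(3), "Audited 3 packages in [TIME]");
}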