Add support for `tool.uv` in distribution building (#3904)

With this change, we remove the special-casing of workspace dependencies
and instead resolve `tool.uv` for all Git and directory distributions.
This gives us support for non-editable workspace dependencies and for
path dependencies in other workspaces, and it removes a lot of
special-casing around workspaces. These changes are the groundwork for
supporting `tool.uv` with dynamic metadata.
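
To illustrate, here's a hypothetical `pyproject.toml` sketch (the
package names and paths are invented for this example): one workspace
dependency installed non-editably, and one path dependency whose target
lives in a different workspace.

```toml
[project]
name = "albatross"
version = "0.1.0"
dependencies = ["bird-feeder", "seeds"]

[tool.uv.sources]
# A workspace member, installed non-editably (`editable` defaults to
# true for workspace sources).
bird-feeder = { workspace = true, editable = false }
# A path dependency whose target is a member of another workspace.
seeds = { path = "../other-workspace/seeds" }
```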

The basis for this change is moving `Requirement` from
`distribution-types` to `pypi-types` and moving the lowering logic from
`uv-requirements` to `uv-distribution`. These changes should be split
out into separate PRs.

I've included an example workspace, `albatross-root-workspace2`, in
which `bird-feeder` depends on `a` from another workspace, `ab`. There
are still a number of failing tests and regressed error messages that
need fixing, but the change does fix the audited package count for the
workspace tests.

konsti authored on 2024-05-31 04:42:03 +02:00, committed by GitHub (commit 081f20c53e, parent 09f55482a0)
69 changed files with 1159 additions and 1680 deletions

Cargo.lock (generated)

@@ -399,6 +399,7 @@ dependencies = [
"once_cell",
"pep508_rs",
"platform-tags",
"pypi-types",
"tokio",
"uv-cache",
"uv-client",
@@ -1103,7 +1104,6 @@ dependencies = [
"cache-key",
"distribution-filename",
"fs-err",
"indexmap",
"itertools 0.13.0",
"once_cell",
"pep440_rs",
@@ -4567,10 +4567,10 @@ version = "0.0.1"
dependencies = [
"anyhow",
"clap",
"distribution-types",
"either",
"pep508_rs",
"platform-tags",
"pypi-types",
"rustc-hash",
"schemars",
"serde",
@@ -4615,10 +4615,10 @@ dependencies = [
"uv-client",
"uv-configuration",
"uv-dispatch",
"uv-distribution",
"uv-fs",
"uv-installer",
"uv-interpreter",
"uv-requirements",
"uv-resolver",
"uv-types",
"uv-workspace",
@@ -4634,6 +4634,7 @@ dependencies = [
"futures",
"install-wheel-rs",
"itertools 0.13.0",
"pypi-types",
"rustc-hash",
"tracing",
"uv-build",
@@ -4657,22 +4658,28 @@ dependencies = [
"distribution-types",
"fs-err",
"futures",
"glob",
"insta",
"install-wheel-rs",
"nanoid",
"once_cell",
"path-absolutize",
"pep440_rs",
"pep508_rs",
"platform-tags",
"pypi-types",
"regex",
"reqwest",
"reqwest-middleware",
"rmp-serde",
"rustc-hash",
"schemars",
"serde",
"tempfile",
"thiserror",
"tokio",
"tokio-util",
"toml",
"tracing",
"url",
"uv-cache",
@@ -4683,6 +4690,7 @@ dependencies = [
"uv-git",
"uv-normalize",
"uv-types",
"uv-warnings",
"zip",
]
@@ -4848,22 +4856,12 @@ dependencies = [
"distribution-types",
"fs-err",
"futures",
"glob",
"indexmap",
"indoc",
"insta",
"path-absolutize",
"pep440_rs",
"pep508_rs",
"pypi-types",
"regex",
"requirements-txt",
"rustc-hash",
"same-file",
"schemars",
"serde",
"thiserror",
"tokio",
"toml",
"tracing",
"url",


@@ -30,17 +30,18 @@ harness = false
[dependencies]
distribution-filename = { workspace = true }
distribution-types = { workspace = true }
install-wheel-rs = { workspace = true }
pep508_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-dispatch = { workspace = true }
uv-configuration = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-interpreter = { workspace = true }
uv-resolver = { workspace = true }
uv-types = { workspace = true }
install-wheel-rs = { workspace = true }
anyhow = { workspace = true }
codspeed-criterion-compat = { version = "2.6.0", default-features = false, optional = true }


@@ -2,7 +2,7 @@ use std::str::FromStr;
use bench::criterion::black_box;
use bench::criterion::{criterion_group, criterion_main, measurement::WallTime, Criterion};
use distribution_types::Requirement;
use pypi_types::Requirement;
use uv_cache::Cache;
use uv_client::RegistryClientBuilder;
use uv_interpreter::PythonEnvironment;
@@ -80,7 +80,9 @@ mod resolver {
use platform_tags::{Arch, Os, Platform, Tags};
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::{Concurrency, ConfigSettings, NoBinary, NoBuild, SetupPyStrategy};
use uv_configuration::{
Concurrency, ConfigSettings, NoBinary, NoBuild, PreviewMode, SetupPyStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_interpreter::PythonEnvironment;
@@ -149,6 +151,7 @@ mod resolver {
&NoBuild::None,
&NoBinary::None,
concurrency,
PreviewMode::Disabled,
);
let resolver = Resolver::new(
@@ -162,7 +165,12 @@ mod resolver {
&hashes,
&build_context,
installed_packages,
DistributionDatabase::new(client, &build_context, concurrency.downloads),
DistributionDatabase::new(
client,
&build_context,
concurrency.downloads,
PreviewMode::Disabled,
),
)?;
Ok(resolver.resolve().await?)


@@ -25,7 +25,6 @@ uv-normalize = { workspace = true }
anyhow = { workspace = true }
fs-err = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
once_cell = { workspace = true }
rkyv = { workspace = true }


@@ -58,7 +58,6 @@ pub use crate::id::*;
pub use crate::index_url::*;
pub use crate::installed::*;
pub use crate::prioritized_distribution::*;
pub use crate::requirement::*;
pub use crate::resolution::*;
pub use crate::resolved::*;
pub use crate::specified_requirement::*;
@@ -76,7 +75,6 @@ mod id;
mod index_url;
mod installed;
mod prioritized_distribution;
mod requirement;
mod resolution;
mod resolved;
mod specified_requirement;


@@ -1,10 +1,9 @@
use pypi_types::{Requirement, RequirementSource};
use std::collections::BTreeMap;
use uv_normalize::{ExtraName, PackageName};
use crate::{
BuiltDist, Diagnostic, Dist, Name, Requirement, RequirementSource, ResolvedDist, SourceDist,
};
use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist};
/// A set of packages pinned at specific versions.
#[derive(Debug, Default, Clone)]


@@ -2,9 +2,10 @@ use std::borrow::Cow;
use std::fmt::{Display, Formatter};
use pep508_rs::{MarkerEnvironment, UnnamedRequirement};
use pypi_types::{Requirement, RequirementSource};
use uv_normalize::ExtraName;
use crate::{Requirement, RequirementSource, VerbatimParsedUrl};
use crate::VerbatimParsedUrl;
/// An [`UnresolvedRequirement`] with additional metadata from `requirements.txt`, currently only
/// hashes but in the future also editable and similar information.


@@ -1541,6 +1541,25 @@ pub enum MarkerTree {
Or(Vec<MarkerTree>),
}
impl<'de> Deserialize<'de> for MarkerTree {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
FromStr::from_str(&s).map_err(de::Error::custom)
}
}
impl Serialize for MarkerTree {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl FromStr for MarkerTree {
type Err = Pep508Error;


@@ -3,6 +3,7 @@ pub use direct_url::*;
pub use lenient_requirement::*;
pub use metadata::*;
pub use parsed_url::*;
pub use requirement::*;
pub use scheme::*;
pub use simple_json::*;
@@ -11,5 +12,6 @@ mod direct_url;
mod lenient_requirement;
mod metadata;
mod parsed_url;
mod requirement;
mod scheme;
mod simple_json;


@@ -6,10 +6,11 @@ use url::Url;
use pep440_rs::VersionSpecifiers;
use pep508_rs::{MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl};
use pypi_types::{ParsedUrl, VerbatimParsedUrl};
use uv_git::{GitReference, GitSha};
use uv_normalize::{ExtraName, PackageName};
use crate::{ParsedUrl, VerbatimParsedUrl};
/// The requirements of a distribution, an extension over PEP 508's requirements.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Requirements {


@@ -44,12 +44,12 @@ use tracing::instrument;
use unscanny::{Pattern, Scanner};
use url::Url;
use distribution_types::{Requirement, UnresolvedRequirement, UnresolvedRequirementSpecification};
use distribution_types::{UnresolvedRequirement, UnresolvedRequirementSpecification};
use pep508_rs::{
expand_env_vars, split_scheme, strip_host, Extras, MarkerTree, Pep508Error, Pep508ErrorSource,
RequirementOrigin, Scheme, UnnamedRequirement, VerbatimUrl,
};
use pypi_types::{ParsedPathUrl, ParsedUrl, VerbatimParsedUrl};
use pypi_types::{ParsedPathUrl, ParsedUrl, Requirement, VerbatimParsedUrl};
#[cfg(feature = "http")]
use uv_client::BaseClient;
use uv_client::BaseClientBuilder;


@@ -25,10 +25,10 @@ use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore};
use tracing::{debug, info_span, instrument, Instrument};
use distribution_types::{Requirement, Resolution};
use distribution_types::Resolution;
use pep440_rs::Version;
use pep508_rs::PackageName;
use pypi_types::VerbatimParsedUrl;
use pypi_types::{Requirement, VerbatimParsedUrl};
use uv_configuration::{BuildKind, ConfigSettings, SetupPyStrategy};
use uv_fs::{PythonExt, Simplified};
use uv_interpreter::{Interpreter, PythonEnvironment};


@@ -13,9 +13,9 @@ license = { workspace = true }
workspace = true
[dependencies]
distribution-types = { workspace = true }
pep508_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-auth = { workspace = true }
uv-normalize = { workspace = true }


@@ -1,8 +1,8 @@
use std::hash::BuildHasherDefault;
use distribution_types::Requirement;
use rustc_hash::FxHashMap;
use pypi_types::Requirement;
use uv_normalize::PackageName;
/// A set of constraints for a set of requirements.


@@ -1,9 +1,9 @@
use std::hash::BuildHasherDefault;
use either::Either;
use pypi_types::Requirement;
use rustc_hash::FxHashMap;
use distribution_types::Requirement;
use uv_normalize::PackageName;
/// A set of overrides for a set of requirements.


@@ -25,11 +25,11 @@ uv-build = { workspace = true }
uv-cache = { workspace = true, features = ["clap"] }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-distribution = { workspace = true, features = ["schemars"] }
uv-dispatch = { workspace = true }
uv-fs = { workspace = true }
uv-installer = { workspace = true }
uv-interpreter = { workspace = true }
uv-requirements = { workspace = true, features = ["schemars"] }
uv-resolver = { workspace = true }
uv-types = { workspace = true }
uv-workspace = { workspace = true, features = ["schemars"] }


@@ -11,7 +11,7 @@ use uv_build::{SourceBuild, SourceBuildContext};
use uv_cache::{Cache, CacheArgs};
use uv_client::RegistryClientBuilder;
use uv_configuration::{
BuildKind, Concurrency, ConfigSettings, NoBinary, NoBuild, SetupPyStrategy,
BuildKind, Concurrency, ConfigSettings, NoBinary, NoBuild, PreviewMode, SetupPyStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_interpreter::PythonEnvironment;
@@ -80,6 +80,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
&NoBuild::None,
&NoBinary::None,
concurrency,
PreviewMode::Enabled,
);
let builder = SourceBuild::setup(


@@ -20,7 +20,7 @@ struct ToolUv {
#[serde(flatten)]
options: Options,
#[serde(flatten)]
dep_spec: uv_requirements::pyproject::ToolUv,
dep_spec: uv_distribution::pyproject::ToolUv,
}
#[derive(clap::Args)]


@@ -16,13 +16,14 @@ workspace = true
[dependencies]
distribution-types = { workspace = true }
install-wheel-rs = { workspace = true }
pypi-types = { workspace = true }
uv-build = { workspace = true }
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-installer = { workspace = true }
uv-interpreter = { workspace = true }
uv-distribution = { workspace = true }
uv-resolver = { workspace = true }
uv-types = { workspace = true }


@@ -11,12 +11,13 @@ use itertools::Itertools;
use rustc_hash::FxHashMap;
use tracing::{debug, instrument};
use distribution_types::{CachedDist, IndexLocations, Name, Requirement, Resolution, SourceDist};
use distribution_types::{CachedDist, IndexLocations, Name, Resolution, SourceDist};
use pypi_types::Requirement;
use uv_build::{SourceBuild, SourceBuildContext};
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::Concurrency;
use uv_configuration::{BuildKind, ConfigSettings, NoBinary, NoBuild, Reinstall, SetupPyStrategy};
use uv_configuration::{Concurrency, PreviewMode};
use uv_distribution::DistributionDatabase;
use uv_installer::{Downloader, Installer, Plan, Planner, SitePackages};
use uv_interpreter::{Interpreter, PythonEnvironment};
@@ -43,6 +44,7 @@ pub struct BuildDispatch<'a> {
options: Options,
build_extra_env_vars: FxHashMap<OsString, OsString>,
concurrency: Concurrency,
preview_mode: PreviewMode,
}
impl<'a> BuildDispatch<'a> {
@@ -62,6 +64,7 @@ impl<'a> BuildDispatch<'a> {
no_build: &'a NoBuild,
no_binary: &'a NoBinary,
concurrency: Concurrency,
preview_mode: PreviewMode,
) -> Self {
Self {
client,
@@ -81,6 +84,7 @@ impl<'a> BuildDispatch<'a> {
source_build_context: SourceBuildContext::default(),
options: Options::default(),
build_extra_env_vars: FxHashMap::default(),
preview_mode,
}
}
@@ -138,7 +142,12 @@ impl<'a> BuildContext for BuildDispatch<'a> {
&HashStrategy::None,
self,
EmptyInstalledPackages,
DistributionDatabase::new(self.client, self, self.concurrency.downloads),
DistributionDatabase::new(
self.client,
self,
self.concurrency.downloads,
self.preview_mode,
),
)?;
let graph = resolver.resolve().await.with_context(|| {
format!(
@@ -220,7 +229,12 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self.cache,
tags,
&HashStrategy::None,
DistributionDatabase::new(self.client, self, self.concurrency.downloads),
DistributionDatabase::new(
self.client,
self,
self.concurrency.downloads,
self.preview_mode,
),
);
debug!(


@@ -23,27 +23,39 @@ platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-extract = { workspace = true }
uv-fs = { workspace = true, features = ["tokio"] }
uv-git = { workspace = true }
uv-normalize = { workspace = true }
uv-types = { workspace = true }
uv-configuration = { workspace = true }
uv-warnings = { workspace = true }
anyhow = { workspace = true }
fs-err = { workspace = true }
futures = { workspace = true }
glob = { workspace = true }
nanoid = { workspace = true }
once_cell = { workspace = true }
path-absolutize = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
rmp-serde = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true, features = ["compat"] }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
zip = { workspace = true }
[dev-dependencies]
insta = { version = "1.39.0", features = ["filters", "json", "redactions"] }
regex = { workspace = true }
[features]
schemars = ["dep:schemars"]


@@ -25,7 +25,7 @@ use uv_cache::{ArchiveId, ArchiveTimestamp, CacheBucket, CacheEntry, Timestamp,
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
use uv_configuration::{NoBinary, NoBuild};
use uv_configuration::{NoBinary, NoBuild, PreviewMode};
use uv_extract::hash::Hasher;
use uv_fs::write_atomic;
use uv_types::BuildContext;
@@ -33,7 +33,7 @@ use uv_types::BuildContext;
use crate::archive::Archive;
use crate::locks::Locks;
use crate::source::SourceDistributionBuilder;
use crate::{ArchiveMetadata, Error, LocalWheel, Reporter};
use crate::{ArchiveMetadata, Error, LocalWheel, Metadata, Reporter};
/// A cached high-level interface to convert distributions (a requirement resolved to a location)
/// to a wheel or wheel metadata.
@@ -60,10 +60,11 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
client: &'a RegistryClient,
build_context: &'a Context,
concurrent_downloads: usize,
preview_mode: PreviewMode,
) -> Self {
Self {
build_context,
builder: SourceDistributionBuilder::new(build_context),
builder: SourceDistributionBuilder::new(build_context, preview_mode),
locks: Rc::new(Locks::default()),
client: ManagedClient::new(client, concurrent_downloads),
reporter: None,
@@ -364,7 +365,10 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let wheel = self.get_wheel(dist, hashes).await?;
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
return Ok(ArchiveMetadata { metadata, hashes });
return Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
hashes,
});
}
let result = self
@@ -373,7 +377,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
.await;
match result {
Ok(metadata) => Ok(ArchiveMetadata::from(metadata)),
Ok(metadata) => Ok(ArchiveMetadata::from_metadata23(metadata)),
Err(err) if err.is_http_streaming_unsupported() => {
warn!("Streaming unsupported when fetching metadata for {dist}; downloading wheel directly ({err})");
@@ -382,7 +386,10 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let wheel = self.get_wheel(dist, hashes).await?;
let metadata = wheel.metadata()?;
let hashes = wheel.hashes;
Ok(ArchiveMetadata { metadata, hashes })
Ok(ArchiveMetadata {
metadata: Metadata::from_metadata23(metadata),
hashes,
})
}
Err(err) => Err(err.into()),
}


@@ -3,6 +3,7 @@ use std::path::PathBuf;
use tokio::task::JoinError;
use zip::result::ZipError;
use crate::MetadataLoweringError;
use distribution_filename::WheelFilenameError;
use pep440_rs::Version;
use pypi_types::HashDigest;
@@ -77,6 +78,8 @@ pub enum Error {
DynamicPyprojectToml(#[source] pypi_types::MetadataError),
#[error("Unsupported scheme in URL: {0}")]
UnsupportedScheme(String),
#[error(transparent)]
MetadataLowering(#[from] MetadataLoweringError),
/// A generic request middleware error happened while making a request.
/// Refer to the error message for more details.


@@ -1,11 +1,21 @@
pub use archive::Archive;
use std::collections::BTreeMap;
use std::path::Path;
use thiserror::Error;
use archive::Archive;
pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalArchivePointer};
pub use download::LocalWheel;
pub use error::Error;
pub use git::{git_url_to_precise, is_same_reference};
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::{HashDigest, Metadata23};
pub use reporter::Reporter;
use requirement_lowering::{lower_requirement, LoweringError};
use uv_configuration::PreviewMode;
use uv_normalize::{ExtraName, PackageName};
pub use workspace::{ProjectWorkspace, Workspace, WorkspaceError, WorkspaceMember};
mod archive;
mod distribution_database;
@@ -14,20 +24,120 @@ mod error;
mod git;
mod index;
mod locks;
pub mod pyproject;
mod reporter;
mod requirement_lowering;
mod source;
mod workspace;
#[derive(Debug, Error)]
pub enum MetadataLoweringError {
#[error(transparent)]
Workspace(#[from] WorkspaceError),
#[error(transparent)]
Lowering(#[from] LoweringError),
}
#[derive(Debug, Clone)]
pub struct Metadata {
// Mandatory fields
pub name: PackageName,
pub version: Version,
// Optional fields
pub requires_dist: Vec<pypi_types::Requirement>,
pub requires_python: Option<VersionSpecifiers>,
pub provides_extras: Vec<ExtraName>,
}
impl Metadata {
/// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
/// dependencies.
pub fn from_metadata23(metadata: Metadata23) -> Self {
Self {
name: metadata.name,
version: metadata.version,
requires_dist: metadata
.requires_dist
.into_iter()
.map(pypi_types::Requirement::from)
.collect(),
requires_python: metadata.requires_python,
provides_extras: metadata.provides_extras,
}
}
/// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
/// dependencies.
pub async fn from_workspace(
metadata: Metadata23,
project_root: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(project_root).await?
else {
return Ok(Self::from_metadata23(metadata));
};
let empty = BTreeMap::default();
let sources = project_workspace
.current_project()
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.unwrap_or(&empty);
let requires_dist = metadata
.requires_dist
.into_iter()
.map(|requirement| {
lower_requirement(
requirement,
&metadata.name,
project_workspace.project_root(),
sources,
project_workspace.workspace(),
preview_mode,
)
})
.collect::<Result<_, _>>()?;
Ok(Self {
name: metadata.name,
version: metadata.version,
requires_dist,
requires_python: metadata.requires_python,
provides_extras: metadata.provides_extras,
})
}
}
/// The metadata associated with an archive.
#[derive(Debug, Clone)]
pub struct ArchiveMetadata {
/// The [`Metadata23`] for the underlying distribution.
pub metadata: Metadata23,
/// The [`Metadata`] for the underlying distribution.
pub metadata: Metadata,
/// The hashes of the source or built archive.
pub hashes: Vec<HashDigest>,
}
impl From<Metadata23> for ArchiveMetadata {
fn from(metadata: Metadata23) -> Self {
impl ArchiveMetadata {
/// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
/// dependencies.
pub fn from_metadata23(metadata: Metadata23) -> Self {
Self {
metadata: Metadata::from_metadata23(metadata),
hashes: vec![],
}
}
}
impl From<Metadata> for ArchiveMetadata {
fn from(metadata: Metadata) -> Self {
Self {
metadata,
hashes: vec![],


@@ -0,0 +1,199 @@
//! Reads the following fields from `pyproject.toml`:
//!
//! * `project.{dependencies,optional-dependencies}`
//! * `tool.uv.sources`
//! * `tool.uv.workspace`
//!
//! Then lowers them into a dependency specification.
use std::collections::BTreeMap;
use std::ops::Deref;
use glob::Pattern;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use pep508_rs::Pep508Error;
use pypi_types::VerbatimParsedUrl;
use uv_normalize::{ExtraName, PackageName};
use crate::LoweringError;
#[derive(Debug, Error)]
pub enum Pep621Error {
#[error(transparent)]
Pep508(#[from] Box<Pep508Error<VerbatimParsedUrl>>),
#[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")]
MissingProjectSection,
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
DynamicNotAllowed(&'static str),
#[error("Failed to parse entry for: `{0}`")]
LoweringError(PackageName, #[source] LoweringError),
}
impl From<Pep508Error<VerbatimParsedUrl>> for Pep621Error {
fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self {
Self::Pep508(Box::new(error))
}
}
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct PyProjectToml {
/// PEP 621-compliant project metadata.
pub project: Option<Project>,
/// Tool-specific metadata.
pub tool: Option<Tool>,
}
/// PEP 621 project metadata (`project`).
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Project {
/// The name of the project
pub name: PackageName,
/// The optional dependencies of the project.
pub optional_dependencies: Option<BTreeMap<ExtraName, Vec<String>>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Tool {
pub uv: Option<ToolUv>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUv {
pub sources: Option<BTreeMap<PackageName, Source>>,
pub workspace: Option<ToolUvWorkspace>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUvWorkspace {
pub members: Option<Vec<SerdePattern>>,
pub exclude: Option<Vec<SerdePattern>>,
}
/// (De)serialize globs as strings.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SerdePattern(#[serde(with = "serde_from_and_to_string")] pub Pattern);
#[cfg(feature = "schemars")]
impl schemars::JsonSchema for SerdePattern {
fn schema_name() -> String {
<String as schemars::JsonSchema>::schema_name()
}
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
<String as schemars::JsonSchema>::json_schema(gen)
}
}
impl Deref for SerdePattern {
type Target = Pattern;
fn deref(&self) -> &Self::Target {
&self.0
}
}
/// A `tool.uv.sources` value.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(untagged, deny_unknown_fields)]
pub enum Source {
/// A remote Git repository, available over HTTPS or SSH.
///
/// Example:
/// ```toml
/// flask = { git = "https://github.com/pallets/flask", tag = "3.0.0" }
/// ```
Git {
/// The repository URL (without the `git+` prefix).
git: Url,
/// The path to the directory with the `pyproject.toml`, if it's not in the archive root.
subdirectory: Option<String>,
// Only one of the three may be used; we'll validate this later and emit a custom error.
rev: Option<String>,
tag: Option<String>,
branch: Option<String>,
},
/// A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution
/// (`.zip`, `.tar.gz`).
///
/// Example:
/// ```toml
/// flask = { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl" }
/// ```
Url {
url: Url,
/// For source distributions, the path to the directory with the `pyproject.toml`, if it's
/// not in the archive root.
subdirectory: Option<String>,
},
/// The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or
/// `.tar.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or
/// `setup.py` file in the root).
Path {
path: String,
/// `false` by default.
editable: Option<bool>,
},
/// A dependency pinned to a specific index, e.g., `torch` after setting `torch` to `https://download.pytorch.org/whl/cu118`.
Registry {
// TODO(konstin): The string is more-or-less a placeholder
index: String,
},
/// A dependency on another package in the workspace.
Workspace {
/// When set to `false`, the package will be fetched from the remote index, rather than
/// included as a workspace package.
workspace: bool,
/// `true` by default.
editable: Option<bool>,
},
/// A catch-all variant used to emit precise error messages when deserializing.
CatchAll {
git: String,
subdirectory: Option<String>,
rev: Option<String>,
tag: Option<String>,
branch: Option<String>,
url: String,
patch: String,
index: String,
workspace: bool,
},
}
/// <https://github.com/serde-rs/serde/issues/1316#issue-332908452>
mod serde_from_and_to_string {
use std::fmt::Display;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serializer};
pub(super) fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
T: Display,
S: Serializer,
{
serializer.collect_str(value)
}
pub(super) fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
String::deserialize(deserializer)?
.parse()
.map_err(de::Error::custom)
}
}


@@ -0,0 +1,241 @@
use std::collections::BTreeMap;
use std::io;
use std::path::{Path, PathBuf};
use path_absolutize::Absolutize;
use thiserror::Error;
use url::Url;
use pep440_rs::VersionSpecifiers;
use pep508_rs::{VerbatimUrl, VersionOrUrl};
use pypi_types::{Requirement, RequirementSource, VerbatimParsedUrl};
use uv_configuration::PreviewMode;
use uv_fs::Simplified;
use uv_git::GitReference;
use uv_normalize::PackageName;
use uv_warnings::warn_user_once;
use crate::pyproject::Source;
use crate::Workspace;
/// An error parsing and merging `tool.uv.sources` with
/// `project.{dependencies,optional-dependencies}`.
#[derive(Debug, Error)]
pub enum LoweringError {
#[error("Package is not included as workspace package in `tool.uv.workspace`")]
UndeclaredWorkspacePackage,
#[error("Can only specify one of: `rev`, `tag`, or `branch`")]
MoreThanOneGitRef,
#[error("Unable to combine options in `tool.uv.sources`")]
InvalidEntry,
#[error(transparent)]
InvalidUrl(#[from] url::ParseError),
#[error(transparent)]
InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError),
#[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")]
ConflictingUrls,
#[error("Could not normalize path: `{}`", _0.user_display())]
Absolutize(PathBuf, #[source] io::Error),
#[error("Fragments are not allowed in URLs: `{0}`")]
ForbiddenFragment(Url),
#[error("`workspace = false` is not yet supported")]
WorkspaceFalse,
#[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")]
MissingPreview,
}
/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`.
pub(crate) fn lower_requirement(
requirement: pep508_rs::Requirement<VerbatimParsedUrl>,
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirement, LoweringError> {
let source = project_sources
.get(&requirement.name)
.or(workspace.sources().get(&requirement.name))
.cloned();
let workspace_package_declared =
// We require that when you use a package that's part of the workspace, ...
!workspace.packages().contains_key(&requirement.name)
// ... it must be declared as a workspace dependency (`workspace = true`), ...
|| matches!(
source,
Some(Source::Workspace {
// By using toml, we technically support `workspace = false`.
workspace: true,
..
})
)
// ... except for recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`.
|| &requirement.name == project_name;
if !workspace_package_declared {
return Err(LoweringError::UndeclaredWorkspacePackage);
}
let Some(source) = source else {
let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty();
// Support recursive editable inclusions.
if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name
{
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
}
return Ok(Requirement::from(requirement));
};
if preview.is_disabled() {
return Err(LoweringError::MissingPreview);
}
let source = match source {
Source::Git {
git,
subdirectory,
rev,
tag,
branch,
} => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let reference = match (rev, tag, branch) {
(None, None, None) => GitReference::DefaultBranch,
(Some(rev), None, None) => {
if rev.starts_with("refs/") {
GitReference::NamedRef(rev.clone())
} else if rev.len() == 40 {
GitReference::FullCommit(rev.clone())
} else {
GitReference::ShortCommit(rev.clone())
}
}
(None, Some(tag), None) => GitReference::Tag(tag),
(None, None, Some(branch)) => GitReference::Branch(branch),
_ => return Err(LoweringError::MoreThanOneGitRef),
};
// Create a PEP 508-compatible URL.
let mut url = Url::parse(&format!("git+{git}"))?;
if let Some(rev) = reference.as_str() {
url.set_path(&format!("{}@{}", url.path(), rev));
}
if let Some(subdirectory) = &subdirectory {
url.set_fragment(Some(&format!("subdirectory={subdirectory}")));
}
let url = VerbatimUrl::from_url(url);
let repository = git.clone();
RequirementSource::Git {
url,
repository,
reference,
precise: None,
subdirectory: subdirectory.map(PathBuf::from),
}
}
Source::Url { url, subdirectory } => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let mut verbatim_url = url.clone();
if verbatim_url.fragment().is_some() {
return Err(LoweringError::ForbiddenFragment(url));
}
if let Some(subdirectory) = &subdirectory {
verbatim_url.set_fragment(Some(subdirectory));
}
let verbatim_url = VerbatimUrl::from_url(verbatim_url);
RequirementSource::Url {
location: url,
subdirectory: subdirectory.map(PathBuf::from),
url: verbatim_url,
}
}
Source::Path { path, editable } => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
path_source(path, project_dir, editable.unwrap_or(false))?
}
Source::Registry { index } => match requirement.version_or_url {
None => {
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
RequirementSource::Registry {
specifier: VersionSpecifiers::empty(),
index: Some(index),
}
}
Some(VersionOrUrl::VersionSpecifier(version)) => RequirementSource::Registry {
specifier: version,
index: Some(index),
},
Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls),
},
Source::Workspace {
workspace: is_workspace,
editable,
} => {
if !is_workspace {
return Err(LoweringError::WorkspaceFalse);
}
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let path = workspace
.packages()
.get(&requirement.name)
.ok_or(LoweringError::UndeclaredWorkspacePackage)?
.clone();
path_source(path.root(), workspace.root(), editable.unwrap_or(true))?
}
Source::CatchAll { .. } => {
// Emit a dedicated error message, which is an improvement over Serde's default error.
return Err(LoweringError::InvalidEntry);
}
};
Ok(Requirement {
name: requirement.name,
extras: requirement.extras,
marker: requirement.marker,
source,
origin: requirement.origin,
})
}
/// Convert a path string to a path section.
fn path_source(
path: impl AsRef<Path>,
project_dir: &Path,
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::Absolutize(path.as_ref().to_path_buf(), err))?
.to_path_buf();
//if !editable {
// // TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// // support `{ workspace = true, editable = false }` since we only collect editables.
// return Err(LoweringError::NonEditableWorkspaceDependency);
//}
Ok(RequirementSource::Path {
path: path_buf,
url,
editable,
})
}


@@ -29,7 +29,7 @@ use uv_cache::{
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
use uv_configuration::{BuildKind, NoBuild};
use uv_configuration::{BuildKind, NoBuild, PreviewMode};
use uv_extract::hash::Hasher;
use uv_fs::{write_atomic, LockedFile};
use uv_types::{BuildContext, SourceBuildTrait};
@@ -39,7 +39,7 @@ use crate::error::Error;
use crate::git::{fetch_git_archive, resolve_precise};
use crate::source::built_wheel_metadata::BuiltWheelMetadata;
use crate::source::revision::Revision;
use crate::{ArchiveMetadata, Reporter};
use crate::{ArchiveMetadata, Metadata, Reporter};
mod built_wheel_metadata;
mod revision;
@@ -48,6 +48,7 @@ mod revision;
pub(crate) struct SourceDistributionBuilder<'a, T: BuildContext> {
build_context: &'a T,
reporter: Option<Arc<dyn Reporter>>,
preview_mode: PreviewMode,
}
/// The name of the file that contains the revision ID for a remote distribution, encoded via `MsgPack`.
@@ -61,10 +62,11 @@ pub(crate) const METADATA: &str = "metadata.msgpack";
impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
/// Initialize a [`SourceDistributionBuilder`] from a [`BuildContext`].
pub(crate) fn new(build_context: &'a T) -> Self {
pub(crate) fn new(build_context: &'a T, preview_mode: PreviewMode) -> Self {
Self {
build_context,
reporter: None,
preview_mode,
}
}
@@ -492,7 +494,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
});
}
@@ -515,7 +517,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
});
}
@@ -542,7 +544,7 @@ }
}
Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
})
}
@@ -720,7 +722,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
});
}
@@ -742,7 +744,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
});
}
@@ -769,7 +771,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
Ok(ArchiveMetadata {
metadata,
metadata: Metadata::from_metadata23(metadata),
hashes: revision.into_hashes(),
})
}
@@ -929,7 +931,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata::from(metadata));
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode)
.await?,
));
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
@@ -946,7 +951,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata::from(metadata));
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode)
.await?,
));
}
// Otherwise, we need to build a wheel.
@@ -970,7 +978,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;
Ok(ArchiveMetadata::from(metadata))
Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode).await?,
))
}
/// Return the [`Revision`] for a local source tree, refreshing it if necessary.
@@ -1137,7 +1147,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T>
{
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata::from(metadata));
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
));
}
}
@@ -1155,7 +1167,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata::from(metadata));
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
));
}
// Otherwise, we need to build a wheel.
@@ -1179,7 +1193,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.await
.map_err(Error::CacheWrite)?;
Ok(ArchiveMetadata::from(metadata))
Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
))
}
/// Download and unzip a source distribution into the cache from an HTTP response.
@@ -1592,7 +1608,7 @@ async fn read_pyproject_toml(
/// Read an existing cached [`Metadata23`], if it exists.
async fn read_cached_metadata(cache_entry: &CacheEntry) -> Result<Option<Metadata23>, Error> {
match fs::read(&cache_entry.path()).await {
Ok(cached) => Ok(Some(rmp_serde::from_slice::<Metadata23>(&cached)?)),
Ok(cached) => Ok(Some(rmp_serde::from_slice(&cached)?)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(Error::CacheRead(err)),
}


@@ -3,12 +3,12 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use distribution_types::{Requirement, RequirementSource};
use glob::{glob, GlobError, PatternError};
use pep508_rs::{VerbatimUrl, VerbatimUrlError};
use rustc_hash::FxHashSet;
use tracing::{debug, trace};
use pep508_rs::VerbatimUrl;
use pypi_types::{Requirement, RequirementSource};
use uv_fs::{absolutize_path, Simplified};
use uv_normalize::{ExtraName, PackageName};
use uv_warnings::warn_user;
@@ -29,12 +29,8 @@ pub enum WorkspaceError {
Toml(PathBuf, #[source] Box<toml::de::Error>),
#[error("No `project` table found in: `{}`", _0.simplified_display())]
MissingProject(PathBuf),
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
DynamicNotAllowed(&'static str),
#[error("Failed to normalize workspace member path")]
Normalize(#[source] std::io::Error),
#[error("Failed to normalize workspace member path")]
VerbatimUrl(#[from] VerbatimUrlError),
}
/// A workspace, consisting of a root directory and members. See [`ProjectWorkspace`].
@@ -371,6 +367,7 @@ impl ProjectWorkspace {
let mut seen = FxHashSet::default();
for member_glob in workspace_definition.members.unwrap_or_default() {
let absolute_glob = workspace_root
.simplified()
.join(member_glob.as_str())
.to_string_lossy()
.to_string();
@@ -427,8 +424,8 @@ impl ProjectWorkspace {
})
}
#[cfg(test)]
pub(crate) fn dummy(root: &Path, project_name: &PackageName) -> Self {
/// Used in tests.
pub fn dummy(root: &Path, project_name: &PackageName) -> Self {
// This doesn't necessarily match the exact test case, but we don't use the other fields
// for the test cases atm.
let root_member = WorkspaceMember {
@@ -436,9 +433,7 @@ impl ProjectWorkspace {
pyproject_toml: PyProjectToml {
project: Some(crate::pyproject::Project {
name: project_name.clone(),
dependencies: None,
optional_dependencies: None,
dynamic: None,
}),
tool: None,
},
@@ -605,6 +600,7 @@ fn is_excluded_from_workspace(
) -> Result<bool, WorkspaceError> {
for exclude_glob in workspace.exclude.iter().flatten() {
let absolute_glob = workspace_root
.simplified()
.join(exclude_glob.as_str())
.to_string_lossy()
.to_string();


@@ -12,23 +12,6 @@ pub use crate::path::*;
pub mod cachedir;
mod path;
/// Reads data from the path and requires that it be valid UTF-8.
///
/// If the file path is `-`, then contents are read from stdin instead.
#[cfg(feature = "tokio")]
pub async fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> {
use std::io::Read;
let path = path.as_ref();
if path == Path::new("-") {
let mut buf = String::with_capacity(1024);
std::io::stdin().read_to_string(&mut buf)?;
Ok(buf)
} else {
fs_err::tokio::read_to_string(path).await
}
}
/// Reads data from the path and requires that it be valid UTF-8 or UTF-16.
///
/// This uses BOM sniffing to determine if the data should be transcoded


@@ -11,9 +11,10 @@ use distribution_filename::WheelFilename;
use distribution_types::{
CachedDirectUrlDist, CachedDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist,
Error, GitSourceDist, Hashed, IndexLocations, InstalledDist, Name, PathBuiltDist,
PathSourceDist, RemoteSource, Requirement, RequirementSource, Verbatim,
PathSourceDist, RemoteSource, Verbatim,
};
use platform_tags::Tags;
use pypi_types::{Requirement, RequirementSource};
use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, WheelCache};
use uv_configuration::{NoBinary, Reinstall};
use uv_distribution::{


@@ -6,8 +6,8 @@ use serde::Deserialize;
use tracing::{debug, trace};
use cache_key::{CanonicalUrl, RepositoryUrl};
use distribution_types::{InstalledDirectUrlDist, InstalledDist, RequirementSource};
use pypi_types::{DirInfo, DirectUrl, VcsInfo, VcsKind};
use distribution_types::{InstalledDirectUrlDist, InstalledDist};
use pypi_types::{DirInfo, DirectUrl, RequirementSource, VcsInfo, VcsKind};
use uv_cache::{ArchiveTarget, ArchiveTimestamp};
#[derive(Debug, Copy, Clone)]


@@ -8,11 +8,10 @@ use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
use distribution_types::{
Diagnostic, InstalledDist, Name, Requirement, UnresolvedRequirement,
UnresolvedRequirementSpecification,
Diagnostic, InstalledDist, Name, UnresolvedRequirement, UnresolvedRequirementSpecification,
};
use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::VerbatimParsedUrl;
use pypi_types::{Requirement, VerbatimParsedUrl};
use uv_interpreter::PythonEnvironment;
use uv_normalize::PackageName;
use uv_types::InstalledPackagesProvider;


@@ -13,7 +13,6 @@ license.workspace = true
cache-key = { workspace = true }
distribution-filename = { workspace = true }
distribution-types = { workspace = true }
pep440_rs = { workspace = true }
pep508_rs = { workspace = true }
pypi-types = { workspace = true }
requirements-txt = { workspace = true, features = ["http"] }
@@ -34,26 +33,12 @@ console = { workspace = true }
ctrlc = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
glob = { workspace = true }
indexmap = { workspace = true }
path-absolutize = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
[features]
schemars = ["dep:schemars"]
[dev-dependencies]
indoc = "2.0.5"
insta = { version = "1.38.0", features = ["filters", "redactions", "json"] }
regex = { workspace = true }
tokio = { workspace = true }
[lints]
workspace = true


@@ -3,14 +3,11 @@ pub use crate::source_tree::*;
pub use crate::sources::*;
pub use crate::specification::*;
pub use crate::unnamed::*;
pub use crate::workspace::*;
mod confirm;
mod lookahead;
pub mod pyproject;
mod source_tree;
mod sources;
mod specification;
mod unnamed;
pub mod upgrade;
mod workspace;


@@ -6,11 +6,9 @@ use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::trace;
use distribution_types::{
BuiltDist, Dist, DistributionMetadata, GitSourceDist, Requirement, RequirementSource,
SourceDist,
};
use distribution_types::{BuiltDist, Dist, DistributionMetadata, GitSourceDist, SourceDist};
use pep508_rs::MarkerEnvironment;
use pypi_types::{Requirement, RequirementSource};
use uv_configuration::{Constraints, Overrides};
use uv_distribution::{DistributionDatabase, Reporter};
use uv_git::GitUrl;


@@ -1,957 +0,0 @@
//! Reads the following fields from `pyproject.toml`:
//!
//! * `project.{dependencies,optional-dependencies}`
//! * `tool.uv.sources`
//! * `tool.uv.workspace`
//!
//! Then lowers them into a dependency specification.
use std::collections::BTreeMap;
use std::io;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use glob::Pattern;
use indexmap::IndexMap;
use path_absolutize::Absolutize;
use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use distribution_types::{Requirement, RequirementSource, Requirements};
use pep440_rs::VersionSpecifiers;
use pep508_rs::{Pep508Error, RequirementOrigin, VerbatimUrl, VersionOrUrl};
use pypi_types::VerbatimParsedUrl;
use uv_configuration::{ExtrasSpecification, PreviewMode};
use uv_fs::Simplified;
use uv_git::GitReference;
use uv_normalize::{ExtraName, PackageName};
use uv_warnings::warn_user_once;
use crate::Workspace;
#[derive(Debug, Error)]
pub enum Pep621Error {
#[error(transparent)]
Pep508(#[from] Box<Pep508Error<VerbatimParsedUrl>>),
#[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")]
MissingProjectSection,
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
DynamicNotAllowed(&'static str),
#[error("Failed to parse entry for: `{0}`")]
LoweringError(PackageName, #[source] LoweringError),
}
impl From<Pep508Error<VerbatimParsedUrl>> for Pep621Error {
fn from(error: Pep508Error<VerbatimParsedUrl>) -> Self {
Self::Pep508(Box::new(error))
}
}
/// An error parsing and merging `tool.uv.sources` with
/// `project.{dependencies,optional-dependencies}`.
#[derive(Debug, Error)]
pub enum LoweringError {
#[error("Unsupported path (can't convert to URL): `{}`", _0.user_display())]
PathToUrl(PathBuf),
#[error("Package is not included as workspace package in `tool.uv.workspace`")]
UndeclaredWorkspacePackage,
#[error("Can only specify one of rev, tag, or branch")]
MoreThanOneGitRef,
#[error("Unable to combine options in `tool.uv.sources`")]
InvalidEntry,
#[error(transparent)]
InvalidUrl(#[from] url::ParseError),
#[error(transparent)]
InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError),
#[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")]
ConflictingUrls,
#[error("Could not normalize path: `{}`", _0.user_display())]
AbsolutizeError(PathBuf, #[source] io::Error),
#[error("Fragments are not allowed in URLs: `{0}`")]
ForbiddenFragment(Url),
#[error("`workspace = false` is not yet supported")]
WorkspaceFalse,
#[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")]
MissingPreview,
#[error("`editable = false` is not yet supported")]
NonEditableWorkspaceDependency,
}
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct PyProjectToml {
/// PEP 621-compliant project metadata.
pub project: Option<Project>,
/// Tool-specific metadata.
pub tool: Option<Tool>,
}
/// PEP 621 project metadata (`project`).
///
/// This is a subset of the full metadata specification, and only includes the fields that are
/// relevant for extracting static requirements.
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Project {
/// The name of the project
pub name: PackageName,
/// Project dependencies
pub dependencies: Option<Vec<String>>,
/// Optional dependencies
pub optional_dependencies: Option<IndexMap<ExtraName, Vec<String>>>,
/// Specifies which fields listed by PEP 621 were intentionally unspecified
/// so another tool can/will provide such metadata dynamically.
pub dynamic: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Tool {
pub uv: Option<ToolUv>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUv {
pub sources: Option<BTreeMap<PackageName, Source>>,
pub workspace: Option<ToolUvWorkspace>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUvWorkspace {
pub members: Option<Vec<SerdePattern>>,
pub exclude: Option<Vec<SerdePattern>>,
}
/// (De)serialize globs as strings.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SerdePattern(#[serde(with = "serde_from_and_to_string")] pub Pattern);
#[cfg(feature = "schemars")]
impl schemars::JsonSchema for SerdePattern {
fn schema_name() -> String {
<String as schemars::JsonSchema>::schema_name()
}
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
<String as schemars::JsonSchema>::json_schema(gen)
}
}
impl Deref for SerdePattern {
type Target = Pattern;
fn deref(&self) -> &Self::Target {
&self.0
}
}
/// A `tool.uv.sources` value.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(untagged, deny_unknown_fields)]
pub enum Source {
/// A remote Git repository, available over HTTPS or SSH.
///
/// Example:
/// ```toml
/// flask = { git = "https://github.com/pallets/flask", tag = "3.0.0" }
/// ```
Git {
/// The repository URL (without the `git+` prefix).
git: Url,
/// The path to the directory with the `pyproject.toml`, if it's not in the archive root.
subdirectory: Option<String>,
// Only one of the three may be used; we'll validate this later and emit a custom error.
rev: Option<String>,
tag: Option<String>,
branch: Option<String>,
},
/// A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution
/// (`.zip`, `.tar.gz`).
///
/// Example:
/// ```toml
/// flask = { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl" }
/// ```
Url {
url: Url,
/// For source distributions, the path to the directory with the `pyproject.toml`, if it's
/// not in the archive root.
subdirectory: Option<String>,
},
/// The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or
/// `.tar.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or
/// `setup.py` file in the root).
Path {
path: String,
/// `false` by default.
editable: Option<bool>,
},
/// A dependency pinned to a specific index, e.g., `torch` after setting `torch` to `https://download.pytorch.org/whl/cu118`.
Registry {
// TODO(konstin): The string is more-or-less a placeholder
index: String,
},
/// A dependency on another package in the workspace.
Workspace {
/// When set to `false`, the package will be fetched from the remote index, rather than
/// included as a workspace package.
workspace: bool,
/// `true` by default.
editable: Option<bool>,
},
/// A catch-all variant used to emit precise error messages when deserializing.
CatchAll {
git: String,
subdirectory: Option<String>,
rev: Option<String>,
tag: Option<String>,
branch: Option<String>,
url: String,
patch: String,
index: String,
workspace: bool,
},
}
/// The PEP 621 project metadata, with static requirements extracted in advance, joined
/// with `tool.uv.sources`.
#[derive(Debug)]
pub(crate) struct Pep621Metadata {
/// The name of the project.
pub(crate) name: PackageName,
/// The requirements extracted from the project.
pub(crate) requirements: Vec<Requirement>,
/// The extras used to collect requirements.
pub(crate) used_extras: FxHashSet<ExtraName>,
}
impl Pep621Metadata {
/// Extract the static [`Pep621Metadata`] from a [`Project`] and [`ExtrasSpecification`], if
/// possible.
///
/// If the project specifies dynamic dependencies, or if the project specifies dynamic optional
/// dependencies and the extras are requested, the requirements cannot be extracted.
///
/// Returns an error if the requirements are not valid PEP 508 requirements.
pub(crate) fn try_from(
pyproject: &PyProjectToml,
extras: &ExtrasSpecification,
pyproject_path: &Path,
project_dir: &Path,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Option<Self>, Pep621Error> {
let project_sources = pyproject
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.clone());
let has_sources = project_sources.is_some() || !workspace.sources().is_empty();
let Some(project) = &pyproject.project else {
return if has_sources {
Err(Pep621Error::MissingProjectSection)
} else {
Ok(None)
};
};
if let Some(dynamic) = project.dynamic.as_ref() {
// If the project specifies dynamic dependencies, we can't extract the requirements.
if dynamic.iter().any(|field| field == "dependencies") {
return if has_sources {
Err(Pep621Error::DynamicNotAllowed("project.dependencies"))
} else {
Ok(None)
};
}
// If we requested extras, and the project specifies dynamic optional dependencies, we can't
// extract the requirements.
if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") {
return if has_sources {
Err(Pep621Error::DynamicNotAllowed(
"project.optional-dependencies",
))
} else {
Ok(None)
};
}
}
let requirements = lower_requirements(
project.dependencies.as_deref(),
project.optional_dependencies.as_ref(),
pyproject_path,
&project.name,
project_dir,
&project_sources.unwrap_or_default(),
workspace,
preview,
)?;
// Parse out the project requirements.
let mut requirements_with_extras = requirements.dependencies;
// Include any optional dependencies specified in `extras`.
let mut used_extras = FxHashSet::default();
if !extras.is_empty() {
// Include the optional dependencies if the extras are requested.
for (extra, optional_requirements) in &requirements.optional_dependencies {
if extras.contains(extra) {
used_extras.insert(extra.clone());
requirements_with_extras.extend(flatten_extra(
&project.name,
optional_requirements,
&requirements.optional_dependencies,
));
}
}
}
Ok(Some(Self {
name: project.name.clone(),
requirements: requirements_with_extras,
used_extras,
}))
}
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn lower_requirements(
dependencies: Option<&[String]>,
optional_dependencies: Option<&IndexMap<ExtraName, Vec<String>>>,
pyproject_path: &Path,
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirements, Pep621Error> {
let dependencies = dependencies
.into_iter()
.flatten()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin(
RequirementOrigin::Project(pyproject_path.to_path_buf(), project_name.clone()),
);
let name = requirement.name.clone();
lower_requirement(
requirement,
project_name,
project_dir,
project_sources,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
})
.collect::<Result<_, Pep621Error>>()?;
let optional_dependencies = optional_dependencies
.into_iter()
.flatten()
.map(|(extra_name, dependencies)| {
let dependencies: Vec<_> = dependencies
.iter()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin(
RequirementOrigin::Project(
pyproject_path.to_path_buf(),
project_name.clone(),
),
);
let name = requirement.name.clone();
lower_requirement(
requirement,
project_name,
project_dir,
project_sources,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
})
.collect::<Result<_, Pep621Error>>()?;
Ok((extra_name.clone(), dependencies))
})
.collect::<Result<_, Pep621Error>>()?;
Ok(Requirements {
dependencies,
optional_dependencies,
})
}
/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`.
pub(crate) fn lower_requirement(
requirement: pep508_rs::Requirement<VerbatimParsedUrl>,
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirement, LoweringError> {
let source = project_sources
.get(&requirement.name)
.or(workspace.sources().get(&requirement.name))
.cloned();
let workspace_package_declared =
// We require that when you use a package that's part of the workspace, ...
!workspace.packages().contains_key(&requirement.name)
// ... it must be declared as a workspace dependency (`workspace = true`), ...
|| matches!(
source,
Some(Source::Workspace {
// Since the TOML field is a bool, `workspace = false` is technically accepted here.
workspace: true,
..
})
)
// ... except for recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`.
|| &requirement.name == project_name;
if !workspace_package_declared {
return Err(LoweringError::UndeclaredWorkspacePackage);
}
let Some(source) = source else {
let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty();
// Support recursive editable inclusions.
if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name
{
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
}
return Ok(Requirement::from(requirement));
};
if preview.is_disabled() {
return Err(LoweringError::MissingPreview);
}
let source = match source {
Source::Git {
git,
subdirectory,
rev,
tag,
branch,
} => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let reference = match (rev, tag, branch) {
(None, None, None) => GitReference::DefaultBranch,
(Some(rev), None, None) => {
if rev.starts_with("refs/") {
GitReference::NamedRef(rev.clone())
} else if rev.len() == 40 {
GitReference::FullCommit(rev.clone())
} else {
GitReference::ShortCommit(rev.clone())
}
}
(None, Some(tag), None) => GitReference::Tag(tag),
(None, None, Some(branch)) => GitReference::Branch(branch),
_ => return Err(LoweringError::MoreThanOneGitRef),
};
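// For illustration (matching the arms above): `rev = "refs/heads/main"`
// becomes a named ref, a 40-character hex string (a full SHA) becomes a
// full commit, and anything shorter (e.g. `rev = "baaaaaab"`) is treated
// as a short commit prefix.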
// Create a PEP 508-compatible URL.
let mut url = Url::parse(&format!("git+{git}"))?;
if let Some(rev) = reference.as_str() {
url.set_path(&format!("{}@{}", url.path(), rev));
}
if let Some(subdirectory) = &subdirectory {
url.set_fragment(Some(&format!("subdirectory={subdirectory}")));
}
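// e.g. `git = "https://github.com/tqdm/tqdm"` with `tag = "v4.66.0"` and
// `subdirectory = "python"` (illustrative values) yields
// `git+https://github.com/tqdm/tqdm@v4.66.0#subdirectory=python`.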
let url = VerbatimUrl::from_url(url);
let repository = git.clone();
RequirementSource::Git {
url,
repository,
reference,
precise: None,
subdirectory: subdirectory.map(PathBuf::from),
}
}
Source::Url { url, subdirectory } => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let mut verbatim_url = url.clone();
if verbatim_url.fragment().is_some() {
return Err(LoweringError::ForbiddenFragment(url));
}
if let Some(subdirectory) = &subdirectory {
verbatim_url.set_fragment(Some(subdirectory));
}
let verbatim_url = VerbatimUrl::from_url(verbatim_url);
RequirementSource::Url {
location: url,
subdirectory: subdirectory.map(PathBuf::from),
url: verbatim_url,
}
}
Source::Path { path, editable } => {
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
path_source(path, project_dir, editable.unwrap_or(false))?
}
Source::Registry { index } => match requirement.version_or_url {
None => {
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
requirement.name
);
RequirementSource::Registry {
specifier: VersionSpecifiers::empty(),
index: Some(index),
}
}
Some(VersionOrUrl::VersionSpecifier(version)) => RequirementSource::Registry {
specifier: version,
index: Some(index),
},
Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls),
},
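// A hedged example: `torch = { index = "torch-cu118" }` (an assumed index
// name) combined with `torch>=2.0.0` in `project.dependencies` lowers to a
// registry requirement carrying both the specifier and the index.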
Source::Workspace {
workspace: is_workspace,
editable,
} => {
if !is_workspace {
return Err(LoweringError::WorkspaceFalse);
}
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let path = workspace
.packages()
.get(&requirement.name)
.ok_or(LoweringError::UndeclaredWorkspacePackage)?
.clone();
path_source(path.root(), workspace.root(), editable.unwrap_or(true))?
}
Source::CatchAll { .. } => {
// Emit a dedicated error message, which is an improvement over Serde's default error.
return Err(LoweringError::InvalidEntry);
}
};
Ok(Requirement {
name: requirement.name,
extras: requirement.extras,
marker: requirement.marker,
source,
origin: requirement.origin,
})
}
/// Convert a path string to a path source.
fn path_source(
path: impl AsRef<Path>,
project_dir: &Path,
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::AbsolutizeError(path.as_ref().to_path_buf(), err))?
.to_path_buf();
if !editable {
// TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// support `{ workspace = true, editable = false }` since we only collect editables.
return Err(LoweringError::NonEditableWorkspaceDependency);
}
Ok(RequirementSource::Path {
path: path_buf,
url,
editable,
})
}
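// A hedged example: for `foo = { path = "../foo", editable = true }` declared
// in `/work/bar/pyproject.toml`, the path is absolutized against the project
// directory to `/work/foo`, while the verbatim URL preserves the `../foo`
// spelling the user wrote.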
/// Given an extra in a project that may contain references to the project itself, flatten it into
/// a list of requirements.
///
/// For example:
/// ```toml
/// [project]
/// name = "my-project"
/// version = "0.0.1"
/// dependencies = [
/// "tomli",
/// ]
///
/// [project.optional-dependencies]
/// test = [
/// "pep517",
/// ]
/// dev = [
/// "my-project[test]",
/// ]
/// ```
fn flatten_extra(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<ExtraName, Vec<Requirement>>,
) -> Vec<Requirement> {
fn inner(
project_name: &PackageName,
requirements: &[Requirement],
extras: &IndexMap<ExtraName, Vec<Requirement>>,
seen: &mut FxHashSet<ExtraName>,
) -> Vec<Requirement> {
let mut flattened = Vec::with_capacity(requirements.len());
for requirement in requirements {
if requirement.name == *project_name {
for extra in &requirement.extras {
// Avoid infinite recursion on mutually recursive extras.
if !seen.insert(extra.clone()) {
continue;
}
// Flatten the extra requirements.
for (other_extra, extra_requirements) in extras {
if other_extra == extra {
flattened.extend(inner(project_name, extra_requirements, extras, seen));
}
}
}
} else {
flattened.push(requirement.clone());
}
}
flattened
}
inner(
project_name,
requirements,
extras,
&mut FxHashSet::default(),
)
}
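// For the doc example above, flattening `my-project[dev]` replaces the
// self-reference `my-project[test]` with the `test` extra's requirements,
// yielding `["pep517"]`; the `seen` set breaks cycles between mutually
// recursive extras.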
/// <https://github.com/serde-rs/serde/issues/1316#issue-332908452>
mod serde_from_and_to_string {
use std::fmt::Display;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serializer};
pub(super) fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
T: Display,
S: Serializer,
{
serializer.collect_str(value)
}
pub(super) fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
String::deserialize(deserializer)?
.parse()
.map_err(de::Error::custom)
}
}
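// This helper is typically applied via serde's field attribute, e.g.
// `#[serde(with = "serde_from_and_to_string")]` on a field whose type
// implements `Display` and `FromStr` (such as a URL); shown here as an
// assumed usage sketch.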
#[cfg(test)]
mod test {
use std::path::Path;
use std::str::FromStr;
use anyhow::Context;
use indoc::indoc;
use insta::assert_snapshot;
use uv_configuration::{ExtrasSpecification, PreviewMode};
use uv_fs::Simplified;
use uv_normalize::PackageName;
use crate::ProjectWorkspace;
use crate::RequirementsSpecification;
fn from_source(
contents: &str,
path: impl AsRef<Path>,
extras: &ExtrasSpecification,
) -> anyhow::Result<RequirementsSpecification> {
let path = uv_fs::absolutize_path(path.as_ref())?;
let project_workspace =
ProjectWorkspace::dummy(path.as_ref(), &PackageName::from_str("foo").unwrap());
let pyproject_toml =
toml::from_str(contents).context("Failed to parse: `pyproject.toml`")?;
RequirementsSpecification::parse_direct_pyproject_toml(
&pyproject_toml,
project_workspace.workspace(),
extras,
path.as_ref(),
PreviewMode::Enabled,
)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?
.context("Missing workspace")
}
fn format_err(input: &str) -> String {
let err = from_source(input, "pyproject.toml", &ExtrasSpecification::None).unwrap_err();
let mut causes = err.chain();
let mut message = String::new();
message.push_str(&format!("error: {}\n", causes.next().unwrap()));
for err in causes {
message.push_str(&format!(" Caused by: {err}\n"));
}
message
}
#[test]
fn conflict_project_and_sources() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[test]
fn too_many_git_specs() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: Failed to parse entry for: `tqdm`
Caused by: Can only specify one of rev, tag, or branch
"###);
}
#[test]
fn too_many_git_typo() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 9, column 8
|
9 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[test]
fn you_cant_mix_those() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { path = "tqdm", index = "torch" }
"#};
// TODO(konsti): This should tell you the set of valid fields
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 9, column 8
|
9 | tqdm = { path = "tqdm", index = "torch" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[test]
fn missing_constraint() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
"#};
assert!(from_source(input, "pyproject.toml", &ExtrasSpecification::None).is_ok());
}
#[test]
fn invalid_syntax() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 9, column 16
|
9 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
| ^
invalid string
expected `"`, `'`
"###);
}
#[test]
fn invalid_url() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = "§invalid#+#*Ä" }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 9, column 8
|
9 | tqdm = { url = "§invalid#+#*Ä" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Source
"###);
}
#[test]
fn workspace_and_url_spec() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: Failed to parse entry for: `tqdm`
Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources`
"###);
}
#[test]
fn missing_workspace_package() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: Failed to parse entry for: `tqdm`
Caused by: Package is not included as workspace package in `tool.uv.workspace`
"###);
}
#[test]
fn cant_be_dynamic() {
let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dynamic = [
"dependencies"
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: pyproject.toml section is declared as dynamic, but must be static: `project.dependencies`
"###);
}
#[test]
fn missing_project_section() {
let input = indoc! {"
[tool.uv.sources]
tqdm = { workspace = true }
"};
assert_snapshot!(format_err(input), @r###"
error: Failed to parse: `pyproject.toml`
Caused by: Must specify a `[project]` section alongside `[tool.uv.sources]`
"###);
}
}
View file
@ -7,17 +7,26 @@ use futures::stream::FuturesOrdered;
use futures::TryStreamExt;
use url::Url;
use distribution_types::{
BuildableSource, DirectorySourceUrl, HashPolicy, Requirement, SourceUrl, VersionId,
};
use distribution_types::{BuildableSource, DirectorySourceUrl, HashPolicy, SourceUrl, VersionId};
use pep508_rs::RequirementOrigin;
use pypi_types::VerbatimParsedUrl;
use pypi_types::Requirement;
use uv_configuration::ExtrasSpecification;
use uv_distribution::{DistributionDatabase, Reporter};
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use uv_resolver::{InMemoryIndex, MetadataResponse};
use uv_types::{BuildContext, HashStrategy};
#[derive(Debug, Clone)]
pub struct SourceTreeResolution {
/// The requirements sourced from the source trees.
pub requirements: Vec<Requirement>,
/// The name of the project that was resolved.
pub project: PackageName,
/// The extras provided by the resolved project.
pub extras: Vec<ExtraName>,
}
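// A hedged sketch of the intended flow: callers resolve each source tree to a
// `SourceTreeResolution`, flatten the `requirements`, and use `project` and
// `extras` for naming the output and flagging unused requested extras.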
/// A resolver for requirements specified via source trees.
///
/// Used, e.g., to determine the input requirements when a user specifies a `pyproject.toml`
@ -63,26 +72,19 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
}
/// Resolve the requirements from the provided source trees.
pub async fn resolve(self) -> Result<Vec<Requirement>> {
let requirements: Vec<_> = self
pub async fn resolve(self) -> Result<Vec<SourceTreeResolution>> {
let resolutions: Vec<_> = self
.source_trees
.iter()
.map(|source_tree| async { self.resolve_source_tree(source_tree).await })
.collect::<FuturesOrdered<_>>()
.try_collect()
.await?;
Ok(requirements
.into_iter()
.flatten()
.map(Requirement::from)
.collect())
Ok(resolutions)
}
/// Infer the package name for a given "unnamed" requirement.
async fn resolve_source_tree(
&self,
path: &Path,
) -> Result<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>> {
/// Infer the dependencies for a directory dependency.
async fn resolve_source_tree(&self, path: &Path) -> Result<SourceTreeResolution> {
// Convert to a buildable source.
let source_tree = fs_err::canonicalize(path).with_context(|| {
format!(
@ -151,40 +153,59 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
}
};
// Extract the origin.
let origin = RequirementOrigin::Project(path.to_path_buf(), metadata.name.clone());
// Determine the extras to include when resolving the requirements.
let extras = match self.extras {
ExtrasSpecification::All => metadata.provides_extras.as_slice(),
ExtrasSpecification::None => &[],
ExtrasSpecification::Some(extras) => extras,
};
// Determine the appropriate requirements to return based on the extras. This involves
// evaluating the `extras` expression in any markers, but preserving the remaining marker
// conditions.
match self.extras {
ExtrasSpecification::None => Ok(metadata
.requires_dist
.into_iter()
.map(|requirement| requirement.with_origin(origin.clone()))
.collect()),
ExtrasSpecification::All => Ok(metadata
.requires_dist
.into_iter()
.map(|requirement| pep508_rs::Requirement {
origin: Some(origin.clone()),
marker: requirement
.marker
.and_then(|marker| marker.simplify_extras(&metadata.provides_extras)),
..requirement
})
.collect()),
ExtrasSpecification::Some(extras) => Ok(metadata
.requires_dist
.into_iter()
.map(|requirement| pep508_rs::Requirement {
origin: Some(origin.clone()),
marker: requirement
.marker
.and_then(|marker| marker.simplify_extras(extras)),
..requirement
})
.collect()),
let mut requirements: Vec<Requirement> = metadata
.requires_dist
.into_iter()
.map(|requirement| Requirement {
origin: Some(origin.clone()),
marker: requirement
.marker
.and_then(|marker| marker.simplify_extras(extras)),
..requirement
})
.collect();
// Resolve any recursive extras.
loop {
// Find the first recursive requirement.
// TODO(charlie): Respect markers on recursive extras.
let Some(index) = requirements.iter().position(|requirement| {
requirement.name == metadata.name && requirement.marker.is_none()
}) else {
break;
};
// Remove the requirement that points to us.
let recursive = requirements.remove(index);
// Re-simplify the requirements.
for requirement in &mut requirements {
requirement.marker = requirement
.marker
.take()
.and_then(|marker| marker.simplify_extras(&recursive.extras));
}
}
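// A hedged example: given `foo[dev]` where the `dev` extra includes
// `foo[test]`, the self-requirement is removed and the remaining
// requirements' markers are re-simplified against the activated `test` extra.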
let project = metadata.name;
let extras = metadata.provides_extras;
Ok(SourceTreeResolution {
requirements,
project,
extras,
})
}
}
View file
@ -27,33 +27,29 @@
//! * `setup.py` or `setup.cfg` instead of `pyproject.toml`: Directory is an entry in
//! `source_trees`.
use std::collections::VecDeque;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use path_absolutize::Absolutize;
use rustc_hash::FxHashSet;
use same_file::is_same_file;
use tracing::{debug, instrument, trace};
use tracing::instrument;
use cache_key::CanonicalUrl;
use distribution_types::{
FlatIndexLocation, IndexUrl, Requirement, RequirementSource, UnresolvedRequirement,
UnresolvedRequirementSpecification,
FlatIndexLocation, IndexUrl, UnresolvedRequirement, UnresolvedRequirementSpecification,
};
use pep508_rs::{UnnamedRequirement, UnnamedRequirementUrl};
use pypi_types::Requirement;
use pypi_types::VerbatimParsedUrl;
use requirements_txt::{
EditableRequirement, FindLink, RequirementEntry, RequirementsTxt, RequirementsTxtRequirement,
};
use uv_client::BaseClientBuilder;
use uv_configuration::{ExtrasSpecification, NoBinary, NoBuild, PreviewMode};
use uv_configuration::{NoBinary, NoBuild};
use uv_distribution::pyproject::PyProjectToml;
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use crate::pyproject::{Pep621Metadata, PyProjectToml};
use crate::ProjectWorkspace;
use crate::{RequirementsSource, Workspace, WorkspaceError};
use crate::RequirementsSource;
#[derive(Debug, Default)]
pub struct RequirementsSpecification {
@ -88,10 +84,7 @@ impl RequirementsSpecification {
#[instrument(skip_all, level = tracing::Level::DEBUG, fields(source = % source))]
pub async fn from_source(
source: &RequirementsSource,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
workspace: Option<&Workspace>,
preview: PreviewMode,
) -> Result<Self> {
Ok(match source {
RequirementsSource::Package(name) => {
@ -108,9 +101,22 @@ impl RequirementsSpecification {
}
}
RequirementsSource::Editable(name) => {
Self::from_editable_source(name, extras, workspace, preview).await?
let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?)
.with_context(|| format!("Failed to parse: `{name}`"))?;
Self {
requirements: vec![UnresolvedRequirementSpecification::from(requirement)],
..Self::default()
}
}
RequirementsSource::RequirementsTxt(path) => {
if !(path == Path::new("-")
|| path.starts_with("http://")
|| path.starts_with("https://")
|| path.is_file())
{
return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
}
let requirements_txt =
RequirementsTxt::parse(path, std::env::current_dir()?, client_builder).await?;
Self {
@ -151,317 +157,68 @@ impl RequirementsSpecification {
}
}
RequirementsSource::PyprojectToml(path) => {
Self::from_pyproject_toml_source(path, extras, preview).await?
let contents = match fs_err::tokio::read_to_string(&path).await {
Ok(contents) => contents,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
}
Err(err) => {
return Err(anyhow::anyhow!(
"Failed to read `{}`: {}",
path.user_display(),
err
));
}
};
let _ = toml::from_str::<PyProjectToml>(&contents)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?;
Self {
source_trees: vec![path.clone()],
..Self::default()
}
}
RequirementsSource::SetupPy(path) | RequirementsSource::SetupCfg(path) => {
if !path.is_file() {
return Err(anyhow::anyhow!("File not found: `{}`", path.user_display()));
}
Self {
source_trees: vec![path.clone()],
..Self::default()
}
}
RequirementsSource::SourceTree(path) => {
if !path.is_dir() {
return Err(anyhow::anyhow!(
"Directory not found: `{}`",
path.user_display()
));
}
Self {
project: None,
requirements: vec![UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement {
url: VerbatimParsedUrl::parse_absolute_path(path)?,
extras: vec![],
marker: None,
origin: None,
}),
hashes: vec![],
}],
..Self::default()
}
}
RequirementsSource::SetupPy(path) | RequirementsSource::SetupCfg(path) => Self {
source_trees: vec![path.clone()],
..Self::default()
},
RequirementsSource::SourceTree(path) => Self {
project: None,
requirements: vec![UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Unnamed(UnnamedRequirement {
url: VerbatimParsedUrl::parse_absolute_path(path)?,
extras: vec![],
marker: None,
origin: None,
}),
hashes: vec![],
}],
..Self::default()
},
})
}
async fn from_editable_source(
name: &str,
extras: &ExtrasSpecification,
workspace: Option<&Workspace>,
preview: PreviewMode,
) -> Result<RequirementsSpecification> {
let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?)
.with_context(|| format!("Failed to parse: `{name}`"))?;
// If we're not in preview mode, return the editable without searching for a workspace.
if preview.is_disabled() {
return Ok(Self {
requirements: vec![UnresolvedRequirementSpecification::from(requirement)],
..Self::default()
});
}
// First try to find the project in the existing workspace (if any), then try workspace
// discovery.
let project_in_existing_workspace = workspace.and_then(|workspace| {
// We use `is_same_file` instead of indexing by path to support different versions of
// the same path (e.g. symlinks).
workspace
.packages()
.values()
.find(|member| is_same_file(member.root(), &requirement.path).unwrap_or(false))
.map(|member| (member.pyproject_toml(), workspace))
});
let editable_spec = if let Some((pyproject_toml, workspace)) = project_in_existing_workspace
{
debug!(
"Found project in workspace at: `{}`",
requirement.path.user_display()
);
Self::parse_direct_pyproject_toml(
pyproject_toml,
workspace,
extras,
requirement.path.as_ref(),
preview,
)
.with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))?
} else if let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(&requirement.path).await?
{
debug!(
"Found project at workspace root: `{}`",
requirement.path.user_display()
);
let pyproject_toml = project_workspace.current_project().pyproject_toml();
let workspace = project_workspace.workspace();
Self::parse_direct_pyproject_toml(
pyproject_toml,
workspace,
extras,
requirement.path.as_ref(),
preview,
)
.with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))?
} else {
// No `pyproject.toml` or no static metadata also means no workspace support (at the
// moment).
debug!(
"pyproject.toml has dynamic metadata at: `{}`",
requirement.path.user_display()
);
return Ok(Self {
requirements: vec![UnresolvedRequirementSpecification::from(requirement)],
..Self::default()
});
};
if let Some(editable_spec) = editable_spec {
// We only collect the editables here to keep the count of root packages correct.
// TODO(konsti): Collect all workspace packages, even the non-editable ones.
Ok(Self {
requirements: editable_spec
.requirements
.into_iter()
.chain(std::iter::once(UnresolvedRequirementSpecification::from(
requirement,
)))
.filter(|entry| entry.requirement.is_editable())
.collect(),
..Self::default()
})
} else {
debug!(
"pyproject.toml has dynamic metadata at: `{}`",
requirement.path.user_display()
);
Ok(Self {
requirements: vec![UnresolvedRequirementSpecification::from(requirement)],
..Self::default()
})
}
}
async fn from_pyproject_toml_source(
path: &Path,
extras: &ExtrasSpecification,
preview: PreviewMode,
) -> Result<RequirementsSpecification> {
let dir = path.parent().context("pyproject.toml must have a parent")?;
// We have to handle three cases:
// * There is a workspace (possibly implicit) with static dependencies.
// * There are dynamic dependencies: we have to build, and we don't use workspace
// information even if it's present.
// * There was an error during workspace discovery, such as an IO error or a
// `pyproject.toml` in the workspace not matching the (lenient) schema.
match ProjectWorkspace::from_project_root(dir).await {
Ok(project_workspace) => {
let static_pyproject_toml = Self::parse_direct_pyproject_toml(
project_workspace.current_project().pyproject_toml(),
project_workspace.workspace(),
extras,
path,
preview,
)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?;
if let Some(static_pyproject_toml) = static_pyproject_toml {
Ok(static_pyproject_toml)
} else {
debug!("Dynamic pyproject.toml at: `{}`", path.user_display());
Ok(Self {
source_trees: vec![path.to_path_buf()],
..Self::default()
})
}
}
Err(WorkspaceError::MissingProject(_)) => {
debug!(
"Missing `project` table from pyproject.toml at: `{}`",
path.user_display()
);
Ok(Self {
source_trees: vec![path.to_path_buf()],
..Self::default()
})
}
Err(err) => Err(anyhow::Error::new(err)),
}
}
/// Parse and lower a `pyproject.toml`, including all editable workspace dependencies.
///
/// When dependency information is dynamic, or `project.dependencies` is invalid (e.g., Hatch's
/// relative path support), we return `None` and query the metadata via PEP 517 later.
pub(crate) fn parse_direct_pyproject_toml(
pyproject: &PyProjectToml,
workspace: &Workspace,
extras: &ExtrasSpecification,
pyproject_path: &Path,
preview: PreviewMode,
) -> Result<Option<Self>> {
// We need to use this path as the base for the relative paths inside pyproject.toml, so
// we need the absolute path instead of a potentially relative path. E.g., with
// `foo = { path = "../foo" }`, we will join `../foo` onto this path.
let absolute_path = uv_fs::absolutize_path(pyproject_path)?;
let project_dir = absolute_path
.parent()
.context("`pyproject.toml` has no parent directory")?;
let Some(project) = Pep621Metadata::try_from(
pyproject,
extras,
pyproject_path,
project_dir,
workspace,
preview,
)?
else {
debug!(
"Dynamic pyproject.toml at: `{}`",
pyproject_path.user_display()
);
return Ok(None);
};
if preview.is_disabled() {
Ok(Some(Self {
project: Some(project.name),
requirements: project
.requirements
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect(),
extras: project.used_extras,
..Self::default()
}))
} else {
Ok(Some(Self::collect_transitive_editables(
workspace, extras, preview, project,
)?))
}
}
/// Perform a workspace dependency DAG traversal (breadth-first search) to collect all editables
/// eagerly.
///
/// Consider a requirement on A in a workspace with workspace packages A, B, C where
/// A -> B and B -> C.
fn collect_transitive_editables(
workspace: &Workspace,
extras: &ExtrasSpecification,
preview: PreviewMode,
project: Pep621Metadata,
) -> Result<RequirementsSpecification> {
let mut seen = FxHashSet::from_iter([project.name.clone()]);
let mut queue = VecDeque::from([project.name.clone()]);
let mut requirements = Vec::new();
let mut used_extras = FxHashSet::default();
while let Some(project_name) = queue.pop_front() {
let Some(current) = workspace.packages().get(&project_name) else {
continue;
};
trace!("Processing metadata for workspace package {project_name}");
let project_root_absolute = current.root().absolutize_from(workspace.root())?;
let pyproject = current.pyproject_toml().clone();
let project = Pep621Metadata::try_from(
&pyproject,
extras,
&project_root_absolute.join("pyproject.toml"),
project_root_absolute.as_ref(),
workspace,
preview,
)
.with_context(|| {
format!(
"Invalid requirements in: `{}`",
current.root().join("pyproject.toml").user_display()
)
})?
// TODO(konsti): We should support this by building and using the built PEP 517 metadata
.with_context(|| {
format!(
"Workspace member doesn't declare static metadata: `{}`",
current.root().user_display()
)
})?;
// Recurse into any editables.
for requirement in &project.requirements {
if matches!(
requirement.source,
RequirementSource::Path { editable: true, .. }
) {
if seen.insert(requirement.name.clone()) {
queue.push_back(requirement.name.clone());
}
}
}
// Collect the requirements and extras.
used_extras.extend(project.used_extras);
requirements.extend(project.requirements);
}
let spec = Self {
project: Some(project.name),
requirements: requirements
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect(),
extras: used_extras,
..Self::default()
};
Ok(spec)
}
/// Read the combined requirements and constraints from a set of sources.
///
/// If a [`Workspace`] is provided, it will be used as-is without re-discovering a workspace
/// from the filesystem.
pub async fn from_sources(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
workspace: Option<&Workspace>,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
) -> Result<Self> {
let mut spec = Self::default();
@ -469,8 +226,7 @@ impl RequirementsSpecification {
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
// a requirements file can also add constraints.
for source in requirements {
let source =
Self::from_source(source, extras, client_builder, workspace, preview).await?;
let source = Self::from_source(source, client_builder).await?;
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
@ -502,8 +258,7 @@ impl RequirementsSpecification {
// Read all constraints, treating both requirements _and_ constraints as constraints.
// Overrides are ignored, as are the hashes, as they are not relevant for constraints.
for source in constraints {
let source =
Self::from_source(source, extras, client_builder, workspace, preview).await?;
let source = Self::from_source(source, client_builder).await?;
for entry in source.requirements {
match entry.requirement {
UnresolvedRequirement::Named(requirement) => {
@ -538,7 +293,7 @@ impl RequirementsSpecification {
// Read all overrides, treating both requirements _and_ overrides as overrides.
// Constraints are ignored.
for source in overrides {
let source = Self::from_source(source, extras, client_builder, None, preview).await?;
let source = Self::from_source(source, client_builder).await?;
spec.overrides.extend(source.requirements);
spec.overrides.extend(source.overrides);
@ -566,17 +321,7 @@ impl RequirementsSpecification {
pub async fn from_simple_sources(
requirements: &[RequirementsSource],
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
) -> Result<Self> {
Self::from_sources(
requirements,
&[],
&[],
None,
&ExtrasSpecification::None,
client_builder,
preview,
)
.await
Self::from_sources(requirements, &[], &[], client_builder).await
}
}
View file
@ -12,10 +12,10 @@ use tracing::debug;
use distribution_filename::{SourceDistFilename, WheelFilename};
use distribution_types::{
BuildableSource, DirectSourceUrl, DirectorySourceUrl, GitSourceUrl, PathSourceUrl,
RemoteSource, Requirement, SourceUrl, UnresolvedRequirement,
UnresolvedRequirementSpecification, VersionId,
RemoteSource, SourceUrl, UnresolvedRequirement, UnresolvedRequirementSpecification, VersionId,
};
use pep508_rs::{UnnamedRequirement, VersionOrUrl};
use pypi_types::Requirement;
use pypi_types::{Metadata10, ParsedUrl, VerbatimParsedUrl};
use uv_distribution::{DistributionDatabase, Reporter};
use uv_normalize::PackageName;
View file
@ -1,15 +1,14 @@
use std::path::Path;
use anstream::eprint;
use anyhow::Result;
use anstream::eprint;
use requirements_txt::RequirementsTxt;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::Upgrade;
use uv_distribution::ProjectWorkspace;
use uv_resolver::{Lock, Preference, PreferenceError};
use crate::ProjectWorkspace;
/// Load the preferred requirements from an existing `requirements.txt`, applying the upgrade strategy.
pub async fn read_requirements_txt(
output_file: Option<&Path>,
View file
@ -1,7 +1,7 @@
use either::Either;
use distribution_types::Requirement;
use pep508_rs::MarkerEnvironment;
use pypi_types::Requirement;
use uv_configuration::{Constraints, Overrides};
use uv_normalize::PackageName;
use uv_types::RequestedRequirements;
View file
@ -1,4 +1,4 @@
use distribution_types::RequirementSource;
use pypi_types::RequirementSource;
use rustc_hash::FxHashSet;
use pep508_rs::MarkerEnvironment;
View file
@ -3,9 +3,10 @@ use pubgrub::range::Range;
use rustc_hash::FxHashSet;
use tracing::warn;
use distribution_types::{Requirement, RequirementSource, Verbatim};
use distribution_types::Verbatim;
use pep440_rs::Version;
use pep508_rs::MarkerEnvironment;
use pypi_types::{Requirement, RequirementSource};
use uv_configuration::{Constraints, Overrides};
use uv_normalize::{ExtraName, PackageName};
View file
@ -1,17 +1,17 @@
use std::hash::BuildHasherDefault;
use rustc_hash::{FxHashMap, FxHashSet};
use distribution_types::{
Dist, DistributionMetadata, Name, Requirement, ResolutionDiagnostic, VersionId, VersionOrUrlRef,
};
use pep440_rs::{Version, VersionSpecifier};
use pep508_rs::{MarkerEnvironment, MarkerTree};
use petgraph::{
graph::{Graph, NodeIndex},
Directed,
};
use pypi_types::{ParsedUrlError, Yanked};
use rustc_hash::{FxHashMap, FxHashSet};
use distribution_types::{
Dist, DistributionMetadata, Name, ResolutionDiagnostic, VersionId, VersionOrUrlRef,
};
use pep440_rs::{Version, VersionSpecifier};
use pep508_rs::{MarkerEnvironment, MarkerTree};
use pypi_types::{ParsedUrlError, Requirement, Yanked};
use uv_normalize::{ExtraName, PackageName};
use crate::preferences::Preferences;
View file
@ -6,7 +6,8 @@ use itertools::Itertools;
use distribution_types::{DistributionMetadata, Name, ResolvedDist, Verbatim, VersionOrUrlRef};
use pep508_rs::{split_scheme, MarkerTree, Scheme};
use pypi_types::{HashDigest, Metadata23};
use pypi_types::HashDigest;
use uv_distribution::Metadata;
use uv_normalize::{ExtraName, PackageName};
pub use crate::resolution::display::{AnnotationStyle, DisplayResolutionGraph};
@ -24,7 +25,7 @@ pub(crate) struct AnnotatedDist {
pub(crate) extra: Option<ExtraName>,
pub(crate) marker: Option<MarkerTree>,
pub(crate) hashes: Vec<HashDigest>,
pub(crate) metadata: Metadata23,
pub(crate) metadata: Metadata,
}
impl Name for AnnotatedDist {
View file
@ -4,9 +4,10 @@ use std::str::FromStr;
use rustc_hash::FxHashMap;
use distribution_filename::{SourceDistFilename, WheelFilename};
use distribution_types::{RemoteSource, RequirementSource};
use distribution_types::RemoteSource;
use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifierBuildError};
use pep508_rs::MarkerEnvironment;
use pypi_types::RequirementSource;
use uv_normalize::PackageName;
use crate::{DependencyMode, Manifest};
@ -203,10 +204,10 @@ mod tests {
use anyhow::Result;
use url::Url;
use distribution_types::RequirementSource;
use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifiers};
use pep508_rs::VerbatimUrl;
use pypi_types::ParsedUrl;
use pypi_types::RequirementSource;
use crate::resolver::locals::{iter_locals, Locals};
View file
@ -21,14 +21,13 @@ use tracing::{debug, enabled, instrument, trace, warn, Level};
use distribution_types::{
BuiltDist, Dist, DistributionMetadata, IncompatibleDist, IncompatibleSource, IncompatibleWheel,
InstalledDist, RemoteSource, Requirement, ResolvedDist, ResolvedDistRef, SourceDist,
VersionOrUrlRef,
InstalledDist, RemoteSource, ResolvedDist, ResolvedDistRef, SourceDist, VersionOrUrlRef,
};
pub(crate) use locals::Locals;
use pep440_rs::{Version, MIN_VERSION};
use pep508_rs::MarkerEnvironment;
use platform_tags::Tags;
use pypi_types::Metadata23;
use pypi_types::{Metadata23, Requirement};
pub(crate) use urls::Urls;
use uv_configuration::{Constraints, Overrides};
use uv_distribution::{ArchiveMetadata, DistributionDatabase};
@ -1144,7 +1143,9 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
trace!("Received installed distribution metadata for: {dist}");
self.index.distributions().done(
dist.version_id(),
Arc::new(MetadataResponse::Found(ArchiveMetadata::from(metadata))),
Arc::new(MetadataResponse::Found(ArchiveMetadata::from_metadata23(
metadata,
))),
);
}
Some(Response::Dist {
View file
@ -1,9 +1,11 @@
use distribution_types::{RequirementSource, Verbatim};
use distribution_types::Verbatim;
use rustc_hash::FxHashMap;
use tracing::debug;
use pep508_rs::{MarkerEnvironment, VerbatimUrl};
use pypi_types::{ParsedArchiveUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, VerbatimParsedUrl};
use pypi_types::{
ParsedArchiveUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, RequirementSource, VerbatimParsedUrl,
};
use uv_distribution::is_same_reference;
use uv_git::GitUrl;
use uv_normalize::PackageName;
View file
@ -1,4 +1,4 @@
use distribution_types::RequirementSource;
use pypi_types::RequirementSource;
use rustc_hash::{FxHashMap, FxHashSet};
use pep440_rs::Version;
View file
@ -10,10 +10,11 @@ use anyhow::Result;
use chrono::{DateTime, Utc};
use once_cell::sync::Lazy;
use distribution_types::{CachedDist, IndexLocations, Requirement, Resolution, SourceDist};
use distribution_types::{CachedDist, IndexLocations, Resolution, SourceDist};
use pep440_rs::Version;
use pep508_rs::{MarkerEnvironment, MarkerEnvironmentBuilder};
use platform_tags::{Arch, Os, Platform, Tags};
use pypi_types::Requirement;
use uv_cache::Cache;
use uv_client::RegistryClientBuilder;
use uv_configuration::{
@ -155,7 +156,12 @@ async fn resolve(
&hashes,
&build_context,
installed_packages,
DistributionDatabase::new(&client, &build_context, concurrency.downloads),
DistributionDatabase::new(
&client,
&build_context,
concurrency.downloads,
PreviewMode::Disabled,
),
)?;
Ok(resolver.resolve().await?)
}
View file
@ -3,12 +3,9 @@ use std::str::FromStr;
use rustc_hash::FxHashMap;
use url::Url;
use distribution_types::{
DistributionMetadata, HashPolicy, PackageId, Requirement, RequirementSource,
UnresolvedRequirement,
};
use distribution_types::{DistributionMetadata, HashPolicy, PackageId, UnresolvedRequirement};
use pep508_rs::MarkerEnvironment;
use pypi_types::{HashDigest, HashError};
use pypi_types::{HashDigest, HashError, Requirement, RequirementSource};
use uv_normalize::PackageName;
#[derive(Debug, Default, Clone)]
View file
@ -1,4 +1,4 @@
use distribution_types::Requirement;
use pypi_types::Requirement;
use uv_normalize::ExtraName;
/// A set of requirements as requested by a parent requirement.
View file
@ -3,10 +3,9 @@ use std::path::{Path, PathBuf};
use anyhow::Result;
use distribution_types::{
CachedDist, IndexLocations, InstalledDist, Requirement, Resolution, SourceDist,
};
use distribution_types::{CachedDist, IndexLocations, InstalledDist, Resolution, SourceDist};
use pep508_rs::PackageName;
use pypi_types::Requirement;
use uv_cache::Cache;
use uv_configuration::{BuildKind, NoBinary, NoBuild, SetupPyStrategy};
use uv_interpreter::{Interpreter, PythonEnvironment};
View file
@ -112,7 +112,7 @@ pub(crate) async fn pip_compile(
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
mut project,
requirements,
constraints,
overrides,
@ -128,10 +128,7 @@ pub(crate) async fn pip_compile(
requirements,
constraints,
overrides,
None,
&extras,
&client_builder,
preview,
)
.await?;
@ -327,6 +324,7 @@ pub(crate) async fn pip_compile(
&no_build,
&NoBinary::None,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
@ -337,7 +335,7 @@ pub(crate) async fn pip_compile(
requirements,
&hasher,
&top_level_index,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
@ -345,17 +343,52 @@ pub(crate) async fn pip_compile(
// Resolve any source trees into requirements.
if !source_trees.is_empty() {
let resolutions = SourceTreeResolver::new(
source_trees,
&extras,
&hasher,
&top_level_index,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
.await?;
// If we resolved a single project, use it for the project name.
project = project.or_else(|| {
if let [resolution] = &resolutions[..] {
Some(resolution.project.clone())
} else {
None
}
});
// If any of the extras were unused, surface a warning.
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| {
!resolutions
.iter()
.any(|resolution| resolution.extras.contains(extra))
})
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
));
}
}
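// For illustration (assumed invocation): requesting `--extra dev` when no
// resolved source tree provides a `dev` extra fails with
// `Requested extra not found: dev`.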
// Extend the requirements with the resolved source trees.
requirements.extend(
SourceTreeResolver::new(
source_trees,
&extras,
&hasher,
&top_level_index,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
.await?,
resolutions
.into_iter()
.flat_map(|resolution| resolution.requirements),
);
}
@ -367,7 +400,7 @@ pub(crate) async fn pip_compile(
overrides,
&hasher,
&top_level_index,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
@ -425,7 +458,7 @@ pub(crate) async fn pip_compile(
&overrides,
&hasher,
&top_level_index,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(marker_filter)
@ -466,7 +499,7 @@ pub(crate) async fn pip_compile(
&hasher,
&build_dispatch,
EmptyInstalledPackages,
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads),
DistributionDatabase::new(&client, &build_dispatch, concurrency.downloads, preview),
)?
.with_reporter(ResolverReporter::from(printer));
View file
@ -100,10 +100,8 @@ pub(crate) async fn pip_install(
requirements,
constraints,
overrides,
None,
extras,
&client_builder,
preview,
)
.await?;
@ -318,6 +316,7 @@ pub(crate) async fn pip_install(
&no_build,
&no_binary,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
@ -358,6 +357,7 @@ pub(crate) async fn pip_install(
concurrency,
options,
printer,
preview,
)
.await
{
@ -395,6 +395,7 @@ pub(crate) async fn pip_install(
&no_build,
&no_binary,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build())
};
@ -419,6 +420,7 @@ pub(crate) async fn pip_install(
&venv,
dry_run,
printer,
preview,
)
.await?;
View file
@ -9,8 +9,7 @@ use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::{
CachedDist, Diagnostic, InstalledDist, Requirement, ResolutionDiagnostic,
UnresolvedRequirementSpecification,
CachedDist, Diagnostic, InstalledDist, ResolutionDiagnostic, UnresolvedRequirementSpecification,
};
use distribution_types::{
DistributionMetadata, IndexLocations, InstalledMetadata, LocalDist, Name, Resolution,
@ -18,6 +17,7 @@ use distribution_types::{
use install_wheel_rs::linker::LinkMode;
use pep508_rs::MarkerEnvironment;
use platform_tags::Tags;
use pypi_types::Requirement;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, RegistryClient};
use uv_configuration::{
@ -32,7 +32,7 @@ use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_normalize::PackageName;
use uv_requirements::{
LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
SourceTreeResolver, Workspace,
SourceTreeResolver,
};
use uv_resolver::{
DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference,
@ -42,8 +42,7 @@ use uv_types::{HashStrategy, InFlight, InstalledPackagesProvider};
use uv_warnings::warn_user;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
use crate::commands::DryRunEvent;
use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind};
use crate::commands::{compile_bytecode, elapsed, ChangeEvent, ChangeEventKind, DryRunEvent};
use crate::printer::Printer;
/// Consolidate the requirements for an installation.
@ -51,10 +50,8 @@ pub(crate) async fn read_requirements(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
workspace: Option<&Workspace>,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
) -> Result<RequirementsSpecification, Error> {
// If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`),
// return an error.
@ -66,39 +63,13 @@ pub(crate) async fn read_requirements(
}
// Read all requirements from the provided sources.
let spec = RequirementsSpecification::from_sources(
Ok(RequirementsSpecification::from_sources(
requirements,
constraints,
overrides,
workspace,
extras,
client_builder,
preview,
)
.await?;
// If all the metadata could be statically resolved, validate that every extra was used. If we
// need to resolve metadata via PEP 517, we don't know which extras are used until much later.
if spec.source_trees.is_empty() {
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| !spec.extras.contains(extra))
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
)
.into());
}
}
}
Ok(spec)
.await?)
}
/// Resolve a set of requirements, similar to running `pip compile`.
@ -108,7 +79,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
constraints: Vec<Requirement>,
overrides: Vec<UnresolvedRequirementSpecification>,
source_trees: Vec<PathBuf>,
project: Option<PackageName>,
mut project: Option<PackageName>,
extras: &ExtrasSpecification,
preferences: Vec<Preference>,
installed_packages: InstalledPackages,
@ -125,6 +96,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
concurrency: Concurrency,
options: Options,
printer: Printer,
preview: PreviewMode,
) -> Result<ResolutionGraph, Error> {
let start = std::time::Instant::now();
@ -135,7 +107,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
requirements,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
@ -143,17 +115,53 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
// Resolve any source trees into requirements.
if !source_trees.is_empty() {
let resolutions = SourceTreeResolver::new(
source_trees,
extras,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
.await?;
// If we resolved a single project, use it for the project name.
project = project.or_else(|| {
if let [resolution] = &resolutions[..] {
Some(resolution.project.clone())
} else {
None
}
});
// If any of the extras were unused, surface a warning.
if let ExtrasSpecification::Some(extras) = extras {
let mut unused_extras = extras
.iter()
.filter(|extra| {
!resolutions
.iter()
.any(|resolution| resolution.extras.contains(extra))
})
.collect::<Vec<_>>();
if !unused_extras.is_empty() {
unused_extras.sort_unstable();
unused_extras.dedup();
let s = if unused_extras.len() == 1 { "" } else { "s" };
return Err(anyhow!(
"Requested extra{s} not found: {}",
unused_extras.iter().join(", ")
)
.into());
}
}
// Extend the requirements with the resolved source trees.
requirements.extend(
SourceTreeResolver::new(
source_trees,
extras,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
.await?,
resolutions
.into_iter()
.flat_map(|resolution| resolution.requirements),
);
}
@ -165,7 +173,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
overrides,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve()
@ -185,7 +193,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
&overrides,
hasher,
index,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(Some(markers))
@ -229,7 +237,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
hasher,
build_dispatch,
installed_packages,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)?
.with_reporter(reporter);
@ -287,6 +295,7 @@ pub(crate) async fn install(
venv: &PythonEnvironment,
dry_run: bool,
printer: Printer,
preview: PreviewMode,
) -> Result<(), Error> {
let start = std::time::Instant::now();
@ -362,7 +371,7 @@ pub(crate) async fn install(
cache,
tags,
hasher,
DistributionDatabase::new(client, build_dispatch, concurrency.downloads),
DistributionDatabase::new(client, build_dispatch, concurrency.downloads, preview),
)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
View file
@ -97,10 +97,8 @@ pub(crate) async fn pip_sync(
requirements,
constraints,
overrides,
None,
&extras,
&client_builder,
preview,
)
.await?;
@ -269,6 +267,7 @@ pub(crate) async fn pip_sync(
&no_build,
&no_binary,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
@ -308,6 +307,7 @@ pub(crate) async fn pip_sync(
concurrency,
options,
printer,
preview,
)
.await
{
@ -344,6 +344,7 @@ pub(crate) async fn pip_sync(
&no_build,
&no_binary,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build())
};
@ -368,6 +369,7 @@ pub(crate) async fn pip_sync(
&venv,
dry_run,
printer,
preview,
)
.await?;
View file
@ -5,8 +5,9 @@ use itertools::{Either, Itertools};
use owo_colors::OwoColorize;
use tracing::debug;
use distribution_types::{InstalledMetadata, Name, Requirement, UnresolvedRequirement};
use distribution_types::{InstalledMetadata, Name, UnresolvedRequirement};
use pep508_rs::UnnamedRequirement;
use pypi_types::Requirement;
use pypi_types::VerbatimParsedUrl;
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity};
@ -40,8 +41,7 @@ pub(crate) async fn pip_uninstall(
.keyring(keyring_provider);
// Read all requirements from the provided sources.
let spec =
RequirementsSpecification::from_simple_sources(sources, &client_builder, preview).await?;
let spec = RequirementsSpecification::from_simple_sources(sources, &client_builder).await?;
// Detect the current Python interpreter.
let system = if system {
View file
@ -10,9 +10,9 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_interpreter::PythonEnvironment;
use uv_requirements::upgrade::read_lockfile;
use uv_requirements::ProjectWorkspace;
use uv_resolver::{ExcludeNewer, FlatIndex, InMemoryIndex, Lock, OptionsBuilder};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
use uv_warnings::warn_user;
@ -41,7 +41,17 @@ pub(crate) async fn lock(
let venv = project::init_environment(&project, preview, cache, printer)?;
// Perform the lock operation.
match do_lock(&project, &venv, upgrade, exclude_newer, cache, printer).await {
match do_lock(
&project,
&venv,
upgrade,
exclude_newer,
preview,
cache,
printer,
)
.await
{
Ok(_) => Ok(ExitStatus::Success),
Err(ProjectError::Operation(pip::operations::Error::Resolve(
uv_resolver::ResolveError::NoSolution(err),
@ -61,6 +71,7 @@ pub(super) async fn do_lock(
venv: &PythonEnvironment,
upgrade: Upgrade,
exclude_newer: Option<ExcludeNewer>,
preview: PreviewMode,
cache: &Cache,
printer: Printer,
) -> Result<Lock, ProjectError> {
@ -124,6 +135,7 @@ pub(super) async fn do_lock(
&no_build,
&no_binary,
concurrency,
preview,
);
// Resolve the requirements.
@ -149,6 +161,7 @@ pub(super) async fn do_lock(
concurrency,
options,
printer,
preview,
)
.await?;
View file
@ -14,10 +14,11 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_fs::Simplified;
use uv_installer::{SatisfiesResult, SitePackages};
use uv_interpreter::{find_default_interpreter, PythonEnvironment};
use uv_requirements::{ProjectWorkspace, RequirementsSource, RequirementsSpecification, Workspace};
use uv_requirements::{RequirementsSource, RequirementsSpecification};
use uv_resolver::{FlatIndex, InMemoryIndex, Options};
use uv_types::{BuildIsolation, HashStrategy, InFlight};
@ -107,11 +108,10 @@ pub(crate) fn init_environment(
pub(crate) async fn update_environment(
venv: PythonEnvironment,
requirements: &[RequirementsSource],
workspace: Option<&Workspace>,
preview: PreviewMode,
connectivity: Connectivity,
cache: &Cache,
printer: Printer,
preview: PreviewMode,
) -> Result<PythonEnvironment> {
// TODO(zanieb): Support client configuration
let client_builder = BaseClientBuilder::default().connectivity(connectivity);
@ -119,16 +119,8 @@ pub(crate) async fn update_environment(
// Read all requirements from the provided sources.
// TODO(zanieb): Consider allowing constraints and extras
// TODO(zanieb): Allow specifying extras somehow
let spec = RequirementsSpecification::from_sources(
requirements,
&[],
&[],
workspace,
&ExtrasSpecification::None,
&client_builder,
preview,
)
.await?;
let spec =
RequirementsSpecification::from_sources(requirements, &[], &[], &client_builder).await?;
// Check if the current environment satisfies the requirements
let site_packages = SitePackages::from_executable(&venv)?;
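
For context, the `SitePackages::from_executable` call above feeds a check of whether the environment already satisfies the requested requirements. A rough, self-contained sketch of that kind of check, assuming a plain name-to-version index; uv's real check also handles URLs, markers, and extras.

use std::collections::BTreeMap;

// Illustrative stand-in only: uv's `SitePackages` inspects the interpreter's
// actual site-packages; here the "environment" is a name-to-version map.
fn satisfies(installed: &BTreeMap<&str, &str>, requirements: &[(&str, &str)]) -> bool {
    requirements
        .iter()
        .all(|(name, version)| installed.get(name) == Some(version))
}

fn main() {
    let installed = BTreeMap::from([("anyio", "3.7.0")]);
    // Already satisfied: installation can be skipped entirely.
    assert!(satisfies(&installed, &[("anyio", "3.7.0")]));
    // Not satisfied: a resolve-and-install pass is needed.
    assert!(!satisfies(&installed, &[("idna", "3.6")]));
}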
@ -204,6 +196,7 @@ pub(crate) async fn update_environment(
&no_build,
&no_binary,
concurrency,
preview,
);
// Resolve the requirements.
@ -229,6 +222,7 @@ pub(crate) async fn update_environment(
concurrency,
options,
printer,
preview,
)
.await
{
@ -259,6 +253,7 @@ pub(crate) async fn update_environment(
&no_build,
&no_binary,
concurrency,
preview,
)
};
@ -282,6 +277,7 @@ pub(crate) async fn update_environment(
&venv,
dry_run,
printer,
preview,
)
.await?;

View file

@ -10,8 +10,9 @@ use tracing::debug;
use uv_cache::Cache;
use uv_client::Connectivity;
use uv_configuration::{ExtrasSpecification, PreviewMode, Upgrade};
use uv_distribution::ProjectWorkspace;
use uv_interpreter::{PythonEnvironment, SystemPython};
use uv_requirements::{ProjectWorkspace, RequirementsSource};
use uv_requirements::RequirementsSource;
use uv_resolver::ExcludeNewer;
use uv_warnings::warn_user;
@ -48,9 +49,17 @@ pub(crate) async fn run(
let venv = project::init_environment(&project, preview, cache, printer)?;
// Lock and sync the environment.
let lock =
project::lock::do_lock(&project, &venv, upgrade, exclude_newer, cache, printer).await?;
project::sync::do_sync(&project, &venv, &lock, extras, cache, printer).await?;
let lock = project::lock::do_lock(
&project,
&venv,
upgrade,
exclude_newer,
preview,
cache,
printer,
)
.await?;
project::sync::do_sync(&project, &venv, &lock, extras, preview, cache, printer).await?;
Some(venv)
};
@ -92,16 +101,8 @@ pub(crate) async fn run(
// Install the ephemeral requirements.
Some(
project::update_environment(
venv,
&requirements,
None,
preview,
connectivity,
cache,
printer,
)
.await?,
project::update_environment(venv, &requirements, connectivity, cache, printer, preview)
.await?,
)
};

View file

@ -9,9 +9,9 @@ use uv_configuration::{
SetupPyStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_installer::SitePackages;
use uv_interpreter::PythonEnvironment;
use uv_requirements::ProjectWorkspace;
use uv_resolver::{FlatIndex, InMemoryIndex, Lock};
use uv_types::{BuildIsolation, HashStrategy, InFlight};
use uv_warnings::warn_user;
@ -47,7 +47,7 @@ pub(crate) async fn sync(
};
// Perform the sync operation.
do_sync(&project, &venv, &lock, extras, cache, printer).await?;
do_sync(&project, &venv, &lock, extras, preview, cache, printer).await?;
Ok(ExitStatus::Success)
}
@ -58,6 +58,7 @@ pub(super) async fn do_sync(
venv: &PythonEnvironment,
lock: &Lock,
extras: ExtrasSpecification,
preview: PreviewMode,
cache: &Cache,
printer: Printer,
) -> Result<(), ProjectError> {
@ -112,6 +113,7 @@ pub(super) async fn do_sync(
&no_build,
&no_binary,
concurrency,
preview,
);
let site_packages = SitePackages::from_executable(venv)?;
@ -136,6 +138,7 @@ pub(super) async fn do_sync(
venv,
dry_run,
printer,
preview,
)
.await?;

View file

@ -71,18 +71,8 @@ pub(crate) async fn run(
)?;
// Install the ephemeral requirements.
let ephemeral_env = Some(
update_environment(
venv,
&requirements,
None,
preview,
connectivity,
cache,
printer,
)
.await?,
);
let ephemeral_env =
Some(update_environment(venv, &requirements, connectivity, cache, printer, preview).await?);
// TODO(zanieb): Determine the command via the package entry points
let command = target;

View file

@ -9,8 +9,9 @@ use miette::{Diagnostic, IntoDiagnostic};
use owo_colors::OwoColorize;
use thiserror::Error;
use distribution_types::{IndexLocations, Requirement};
use distribution_types::IndexLocations;
use install_wheel_rs::linker::LinkMode;
use pypi_types::Requirement;
use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
@ -221,6 +222,7 @@ async fn venv_impl(
&NoBuild::All,
&NoBinary::None,
concurrency,
preview,
)
.with_options(OptionsBuilder::new().exclude_newer(exclude_newer).build());
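
The `OptionsBuilder::new().exclude_newer(exclude_newer).build()` call above uses a standard consuming builder. A minimal sketch of that shape, with `ExcludeNewer` modeled as a plain timestamp string rather than uv's parsed RFC 3339 type:

// Stand-in types; uv's real Options carries many more resolver settings.
#[derive(Debug, Default)]
struct Options {
    exclude_newer: Option<String>,
}

#[derive(Debug, Default)]
struct OptionsBuilder {
    exclude_newer: Option<String>,
}

impl OptionsBuilder {
    fn new() -> Self {
        Self::default()
    }

    // Consuming setter: takes `self` by value and returns it for chaining.
    fn exclude_newer(mut self, timestamp: Option<String>) -> Self {
        self.exclude_newer = timestamp;
        self
    }

    fn build(self) -> Options {
        Options {
            exclude_newer: self.exclude_newer,
        }
    }
}

fn main() {
    // Packages published after this cutoff would be ignored during resolution.
    let options = OptionsBuilder::new()
        .exclude_newer(Some("2024-03-25T00:00:00Z".to_string()))
        .build();
    println!("{options:?}");
}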

View file

@ -116,8 +116,7 @@ fn missing_requirements_in() {
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
error: File not found: `requirements.in`
"###
);
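
The updated snapshot collapses the two-line `failed to read from file … / Caused by: No such file or directory` chain into a single `File not found` message. A hedged sketch of how such a mapping might look, using a hypothetical `read_requirements` helper rather than uv's actual error plumbing:

use std::io;
use std::path::Path;

// Hedged sketch, not uv's implementation: map the io::Error chain into the
// single-line message the updated snapshots expect.
fn read_requirements(path: &Path) -> Result<String, String> {
    std::fs::read_to_string(path).map_err(|err| match err.kind() {
        io::ErrorKind::NotFound => format!("File not found: `{}`", path.display()),
        _ => format!("failed to read from file `{}`: {err}", path.display()),
    })
}

fn main() {
    // A missing file now yields the short, user-facing form.
    let err = read_requirements(Path::new("does-not-exist/requirements.in")).unwrap_err();
    assert!(err.starts_with("File not found"));
}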
@ -177,6 +176,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = [
"anyio==3.7.0",
]
@ -216,6 +216,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = [
"anyio==3.7.0",
]
@ -405,6 +406,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
@ -447,6 +449,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
"anyio==3.7.0",
@ -489,6 +492,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
@ -789,6 +793,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
@ -2165,6 +2170,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
@ -2220,6 +2226,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
@ -2309,6 +2316,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
"#,
)?;
@ -2340,6 +2348,7 @@ requires = ["setuptools", "wheel"]
[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==300.1.4"]
"#,
)?;
@ -6228,27 +6237,27 @@ fn pre_release_constraint() -> Result<()> {
Ok(())
}
/// Resolve from a `pyproject.toml` file with a recursive extra.
/// Resolve from a `pyproject.toml` file with a mutually recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
fn compile_pyproject_toml_mutually_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "my-project"
name = "project"
version = "0.0.1"
dependencies = [
"tomli>=2,<3",
"anyio"
]
[project.optional-dependencies]
test = [
"pep517>=0.13,<0.14",
"my-project[dev]"
"iniconfig",
"project[dev]"
]
dev = [
"my-project[test]",
"project[test]",
]
"#,
)?;
@ -6262,13 +6271,67 @@ dev = [
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
pep517==0.13.1
# via my-project (pyproject.toml)
tomli==2.0.1
# via my-project (pyproject.toml)
anyio==4.3.0
# via project (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via project (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 2 packages in [TIME]
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve from a `pyproject.toml` file with a recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
"anyio"
]
[project.optional-dependencies]
test = [
"iniconfig",
]
dev = [
"project[test]",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
anyio==4.3.0
# via project (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via project (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
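
The mutually recursive `dev`/`test` extras above only resolve because extra expansion remembers what it has already visited. A self-contained sketch of that idea; the `project[...]` string matching is purely illustrative and not uv's resolver logic.

use std::collections::{BTreeMap, BTreeSet};

// Expanding `project[dev]` visits `project[test]` and vice versa, so the
// expansion must track already-seen extras to terminate.
fn expand_extra<'a>(
    extras: &BTreeMap<&'a str, Vec<&'a str>>,
    name: &'a str,
    seen: &mut BTreeSet<&'a str>,
    out: &mut BTreeSet<&'a str>,
) {
    if !seen.insert(name) {
        return; // Already expanded; this breaks the dev <-> test cycle.
    }
    for &dep in extras.get(name).into_iter().flatten() {
        if let Some(extra) = dep.strip_prefix("project[").and_then(|s| s.strip_suffix(']')) {
            expand_extra(extras, extra, seen, out);
        } else {
            out.insert(dep);
        }
    }
}

fn main() {
    let extras = BTreeMap::from([
        ("test", vec!["iniconfig", "project[dev]"]),
        ("dev", vec!["project[test]"]),
    ]);
    let (mut seen, mut out) = (BTreeSet::new(), BTreeSet::new());
    expand_extra(&extras, "dev", &mut seen, &mut out);
    // `dev` pulls in `test`'s concrete dependency exactly once.
    assert_eq!(out, BTreeSet::from(["iniconfig"]));
}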

View file

@ -81,8 +81,7 @@ fn missing_requirements_txt() {
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
error: File not found: `requirements.txt`
"###
);
@ -124,8 +123,7 @@ fn missing_pyproject_toml() {
----- stdout -----
----- stderr -----
error: failed to read from file `pyproject.toml`
Caused by: No such file or directory (os error 2)
error: File not found: `pyproject.toml`
"###
);
}
@ -184,41 +182,6 @@ fn invalid_pyproject_toml_schema() -> Result<()> {
Ok(())
}
/// For user controlled pyproject.toml files, we enforce PEP 621.
#[test]
fn invalid_pyproject_toml_requirement_direct() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "project"
dependencies = ["flask==1.0.x"]
"#,
)?;
let filters = [("exit status", "exit code")]
.into_iter()
.chain(context.filters())
.collect::<Vec<_>>();
uv_snapshot!(filters, context.install()
.arg("-r")
.arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse: `pyproject.toml`
Caused by: after parsing '1.0', found '.x', which is not part of a valid version
flask==1.0.x
^^^^^^^
"###
);
Ok(())
}
/// For indirect, non-user controlled pyproject.toml, we don't enforce correctness.
///
/// If we fail to extract the PEP 621 metadata, we fall back to treating it as a source
@ -4980,7 +4943,8 @@ fn tool_uv_sources() -> Result<()> {
----- stdout -----
----- stderr -----
Audited 6 packages in [TIME]
Resolved 9 packages in [TIME]
Audited 9 packages in [TIME]
"###
);
Ok(())
@ -5013,9 +4977,7 @@ fn tool_uv_sources_is_in_preview() -> Result<()> {
----- stdout -----
----- stderr -----
error: Failed to parse: `pyproject.toml`
Caused by: Failed to parse entry for: `tqdm`
Caused by: `tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it
error: `tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it
"###
);

View file

@ -95,8 +95,7 @@ fn missing_requirements_txt() {
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
error: File not found: `requirements.txt`
"###);
requirements_txt.assert(predicates::path::missing());

View file

@ -116,8 +116,7 @@ fn missing_requirements_txt() -> Result<()> {
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
error: File not found: `requirements.txt`
"###
);

View file

@ -218,7 +218,7 @@ fn test_albatross_root_workspace() {
----- stdout -----
----- stderr -----
Audited 3 packages in [TIME]
Audited 1 package in [TIME]
"###
);
@ -257,7 +257,7 @@ fn test_albatross_root_workspace_bird_feeder() {
----- stdout -----
----- stderr -----
Audited 2 packages in [TIME]
Audited 1 package in [TIME]
"###
);
@ -296,7 +296,7 @@ fn test_albatross_root_workspace_albatross() {
----- stdout -----
----- stderr -----
Audited 2 packages in [TIME]
Audited 1 package in [TIME]
"###
);
@ -335,7 +335,7 @@ fn test_albatross_virtual_workspace() {
----- stdout -----
----- stderr -----
Audited 2 packages in [TIME]
Audited 1 package in [TIME]
"###
);