Support dev dependencies in virtual workspace roots (#5709)
## Summary

Closes https://github.com/astral-sh/uv/issues/5650.
Commit 69b8b16c75 (parent 499c368f1e)
10 changed files with 222 additions and 62 deletions
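
The configuration this change targets is a virtual workspace root: a `pyproject.toml` with no `[project]` table that declares `dev-dependencies` under `[tool.uv]`. A minimal sketch, mirroring the test added at the bottom of this diff (the `child` member name comes from that test):

```toml
# Virtual workspace root: no [project] table, so the root itself is not a member.
[tool.uv]
# With this change, these dev dependencies are locked and synced for the workspace.
dev-dependencies = ["anyio>3"]

[tool.uv.workspace]
members = ["child"]
```

With this layout, `uv sync` now installs `anyio` alongside the members' dependencies, while `uv sync --no-dev` omits it (see the snapshots in the new test).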
@@ -2,7 +2,7 @@ pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalA
 pub use download::LocalWheel;
 pub use error::Error;
 pub use index::{BuiltWheelIndex, RegistryWheelIndex};
-pub use metadata::{ArchiveMetadata, Metadata, RequiresDist, DEV_DEPENDENCIES};
+pub use metadata::{ArchiveMetadata, Metadata, RequiresDist};
 pub use reporter::Reporter;

 mod archive;
@@ -4,7 +4,7 @@ use std::path::Path;
 use thiserror::Error;

 use crate::metadata::lowering::LoweringError;
-pub use crate::metadata::requires_dist::{RequiresDist, DEV_DEPENDENCIES};
+pub use crate::metadata::requires_dist::RequiresDist;
 use pep440_rs::{Version, VersionSpecifiers};
 use pypi_types::{HashDigest, Metadata23};
 use uv_configuration::PreviewMode;
@@ -1,22 +1,14 @@
 use std::collections::BTreeMap;
 use std::path::Path;
-use std::sync::LazyLock;

 use uv_configuration::PreviewMode;
-use uv_normalize::{ExtraName, GroupName, PackageName};
+use uv_normalize::{ExtraName, GroupName, PackageName, DEV_DEPENDENCIES};
 use uv_workspace::{DiscoveryOptions, ProjectWorkspace};

 use crate::metadata::lowering::lower_requirement;
 use crate::metadata::MetadataError;
 use crate::Metadata;

-/// The name of the global `dev-dependencies` group.
-///
-/// Internally, we model dependency groups as a generic concept; but externally, we only expose the
-/// `dev-dependencies` group.
-pub static DEV_DEPENDENCIES: LazyLock<GroupName> =
-    LazyLock::new(|| GroupName::new("dev".to_string()).unwrap());
-
 #[derive(Debug, Clone)]
 pub struct RequiresDist {
     pub name: PackageName,
@@ -1,6 +1,7 @@
 use std::fmt;
 use std::fmt::{Display, Formatter};
 use std::str::FromStr;
+use std::sync::LazyLock;

 use serde::{Deserialize, Deserializer, Serialize};

@@ -51,3 +52,10 @@ impl AsRef<str> for GroupName {
         &self.0
     }
 }
+
+/// The name of the global `dev-dependencies` group.
+///
+/// Internally, we model dependency groups as a generic concept; but externally, we only expose the
+/// `dev-dependencies` group.
+pub static DEV_DEPENDENCIES: LazyLock<GroupName> =
+    LazyLock::new(|| GroupName::new("dev".to_string()).unwrap());
@@ -2,7 +2,7 @@ use std::error::Error;
 use std::fmt::{Display, Formatter};

 pub use extra_name::ExtraName;
-pub use group_name::GroupName;
+pub use group_name::{GroupName, DEV_DEPENDENCIES};
 pub use package_name::PackageName;

 mod extra_name;
@@ -420,6 +420,18 @@ impl Lock {
             }
         }

+        // Add any dependency groups that are exclusive to the workspace root (e.g., dev
+        // dependencies in virtual workspaces).
+        for group in dev {
+            for dependency in project.group(group) {
+                let root = self
+                    .find_by_name(dependency)
+                    .expect("found too many distributions matching root")
+                    .expect("could not find root");
+                queue.push_back((root, None));
+            }
+        }
+
         let mut map = BTreeMap::default();
         let mut hashes = BTreeMap::default();
         while let Some((dist, extra)) = queue.pop_front() {
@@ -11,7 +11,7 @@ use tracing::{debug, trace, warn};
 use pep508_rs::{RequirementOrigin, VerbatimUrl};
 use pypi_types::{Requirement, RequirementSource};
 use uv_fs::{absolutize_path, normalize_path, relative_to, Simplified};
-use uv_normalize::PackageName;
+use uv_normalize::{GroupName, PackageName, DEV_DEPENDENCIES};
 use uv_warnings::warn_user;

 use crate::pyproject::{Project, PyProjectToml, Source, ToolUvWorkspace};
@@ -239,53 +239,94 @@ impl Workspace {
         }
     }

-    /// Returns the set of requirements that include all packages in the workspace.
-    pub fn members_as_requirements(&self) -> Vec<Requirement> {
-        self.packages
-            .values()
-            .filter_map(|member| {
-                let project = member.pyproject_toml.project.as_ref()?;
-                // Extract the extras available in the project.
-                let extras = project
-                    .optional_dependencies
-                    .as_ref()
-                    .map(|optional_dependencies| {
-                        // It's a `BTreeMap` so the keys are sorted.
-                        optional_dependencies
-                            .iter()
-                            .filter_map(|(name, dependencies)| {
-                                if dependencies.is_empty() {
-                                    None
-                                } else {
-                                    Some(name)
-                                }
-                            })
-                            .cloned()
-                            .collect::<Vec<_>>()
-                    })
-                    .unwrap_or_default();
-
-                let url = VerbatimUrl::from_path(&member.root)
-                    .expect("path is valid URL")
-                    .with_given(member.root.to_string_lossy());
-                Some(Requirement {
-                    name: project.name.clone(),
-                    extras,
-                    marker: None,
-                    source: RequirementSource::Directory {
-                        install_path: member.root.clone(),
-                        lock_path: member
-                            .root
-                            .strip_prefix(&self.install_path)
-                            .expect("Project must be below workspace root")
-                            .to_path_buf(),
-                        editable: true,
-                        url,
-                    },
-                    origin: None,
-                })
-            })
-            .collect()
-    }
+    pub fn is_virtual(&self) -> bool {
+        !self
+            .packages
+            .values()
+            .any(|member| *member.root() == self.install_path)
+    }
+
+    /// Returns the set of requirements that include all packages in the workspace.
+    pub fn members_requirements(&self) -> impl Iterator<Item = Requirement> + '_ {
+        self.packages.values().filter_map(|member| {
+            let project = member.pyproject_toml.project.as_ref()?;
+            // Extract the extras available in the project.
+            let extras = project
+                .optional_dependencies
+                .as_ref()
+                .map(|optional_dependencies| {
+                    // It's a `BTreeMap` so the keys are sorted.
+                    optional_dependencies
+                        .iter()
+                        .filter_map(|(name, dependencies)| {
+                            if dependencies.is_empty() {
+                                None
+                            } else {
+                                Some(name)
+                            }
+                        })
+                        .cloned()
+                        .collect::<Vec<_>>()
+                })
+                .unwrap_or_default();
+
+            let url = VerbatimUrl::from_path(&member.root)
+                .expect("path is valid URL")
+                .with_given(member.root.to_string_lossy());
+            Some(Requirement {
+                name: project.name.clone(),
+                extras,
+                marker: None,
+                source: RequirementSource::Directory {
+                    install_path: member.root.clone(),
+                    lock_path: member
+                        .root
+                        .strip_prefix(&self.install_path)
+                        .expect("Project must be below workspace root")
+                        .to_path_buf(),
+                    editable: true,
+                    url,
+                },
+                origin: None,
+            })
+        })
+    }
+
+    /// Returns any requirements that are exclusive to the workspace root, i.e., not included in
+    /// any of the workspace members.
+    ///
+    /// For virtual workspaces, returns the dev dependencies in the workspace root, which are
+    /// the only dependencies that are not part of the workspace members.
+    ///
+    /// For non-virtual workspaces, returns an empty list.
+    pub fn root_requirements(&self) -> impl Iterator<Item = Requirement> + '_ {
+        if self
+            .packages
+            .values()
+            .any(|member| *member.root() == self.install_path)
+        {
+            // If the workspace is non-virtual, the root is a member, so we don't need to include
+            // any root-only requirements.
+            Either::Left(std::iter::empty())
+        } else {
+            // Otherwise, return the dev dependencies in the workspace root.
+            Either::Right(
+                self.pyproject_toml
+                    .tool
+                    .as_ref()
+                    .and_then(|tool| tool.uv.as_ref())
+                    .and_then(|uv| uv.dev_dependencies.as_ref())
+                    .into_iter()
+                    .flatten()
+                    .map(|requirement| {
+                        Requirement::from(
+                            requirement
+                                .clone()
+                                .with_origin(RequirementOrigin::Workspace),
+                        )
+                    }),
+            )
+        }
+    }

     /// Returns the set of overrides for the workspace.
@@ -1255,6 +1296,39 @@ impl VirtualProject {
         }
     }

+    /// Return the [`VirtualProject`] dependencies for the given group name.
+    ///
+    /// Returns dependencies that apply to the workspace root, but not any of its members. As such,
+    /// only returns a non-empty iterator for virtual workspaces, which can include dev dependencies
+    /// on the virtual root.
+    pub fn group(&self, name: &GroupName) -> impl Iterator<Item = &PackageName> {
+        match self {
+            VirtualProject::Project(_) => {
+                // For non-virtual projects, dev dependencies are attached to the members.
+                Either::Left(std::iter::empty())
+            }
+            VirtualProject::Virtual(workspace) => {
+                // For virtual projects, we might have dev dependencies that are attached to the
+                // workspace root (which isn't a member).
+                if name == &*DEV_DEPENDENCIES {
+                    Either::Right(
+                        workspace
+                            .pyproject_toml
+                            .tool
+                            .as_ref()
+                            .and_then(|tool| tool.uv.as_ref())
+                            .and_then(|uv| uv.dev_dependencies.as_ref())
+                            .map(|dev| dev.iter().map(|req| &req.name))
+                            .into_iter()
+                            .flatten(),
+                    )
+                } else {
+                    Either::Left(std::iter::empty())
+                }
+            }
+        }
+    }
+
     /// Return the [`PackageName`] of the project, if it's not a virtual workspace.
     pub fn project_name(&self) -> Option<&PackageName> {
         match self {
@@ -15,10 +15,9 @@ use uv_cache::Cache;
 use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
 use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy};
 use uv_dispatch::BuildDispatch;
-use uv_distribution::DEV_DEPENDENCIES;
 use uv_fs::CWD;
 use uv_git::ResolvedRepositoryReference;
-use uv_normalize::PackageName;
+use uv_normalize::{PackageName, DEV_DEPENDENCIES};
 use uv_python::{Interpreter, PythonFetch, PythonPreference, PythonRequest};
 use uv_requirements::upgrade::{read_lock_requirements, LockedRequirements};
 use uv_resolver::{
@@ -228,8 +227,8 @@ async fn do_lock(

     // When locking, include the project itself (as editable).
     let requirements = workspace
-        .members_as_requirements()
-        .into_iter()
+        .members_requirements()
+        .chain(workspace.root_requirements())
         .map(UnresolvedRequirementSpecification::from)
         .collect::<Vec<_>>();
     let overrides = workspace
@@ -7,10 +7,9 @@ use uv_configuration::{
     Concurrency, ExtrasSpecification, HashCheckingMode, PreviewMode, SetupPyStrategy,
 };
 use uv_dispatch::BuildDispatch;
-use uv_distribution::DEV_DEPENDENCIES;
 use uv_fs::CWD;
 use uv_installer::SitePackages;
-use uv_normalize::PackageName;
+use uv_normalize::{PackageName, DEV_DEPENDENCIES};
 use uv_python::{PythonEnvironment, PythonFetch, PythonPreference, PythonRequest};
 use uv_resolver::{FlatIndex, Lock};
 use uv_types::{BuildIsolation, HashStrategy};
@@ -360,3 +360,79 @@ fn mixed_requires_python() -> Result<()> {

     Ok(())
 }
+
+/// Sync development dependencies in a virtual workspace root.
+#[test]
+fn virtual_workspace_dev_dependencies() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [tool.uv]
+        dev-dependencies = ["anyio>3"]
+
+        [tool.uv.workspace]
+        members = ["child"]
+        "#,
+    )?;
+
+    let src = context.temp_dir.child("src").child("albatross");
+    src.create_dir_all()?;
+
+    let init = src.child("__init__.py");
+    init.touch()?;
+
+    let child = context.temp_dir.child("child");
+    fs_err::create_dir_all(&child)?;
+
+    let pyproject_toml = child.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "child"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = ["iniconfig>1"]
+        "#,
+    )?;
+
+    let src = child.child("src").child("albatross");
+    src.create_dir_all()?;
+
+    let init = src.child("__init__.py");
+    init.touch()?;
+
+    // Syncing with `--no-dev` should omit `anyio`.
+    uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    warning: `uv sync` is experimental and may change without warning
+    Resolved 5 packages in [TIME]
+    Prepared 2 packages in [TIME]
+    Installed 2 packages in [TIME]
+     + child==0.1.0 (from file://[TEMP_DIR]/child)
+     + iniconfig==2.0.0
+    "###);
+
+    // Syncing without `--no-dev` should include `anyio`.
+    uv_snapshot!(context.filters(), context.sync(), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    warning: `uv sync` is experimental and may change without warning
+    Resolved 5 packages in [TIME]
+    Prepared 3 packages in [TIME]
+    Installed 3 packages in [TIME]
+     + anyio==4.3.0
+     + idna==3.6
+     + sniffio==1.3.1
+    "###);
+
+    Ok(())
+}