# Add support for development dependencies (#4036)

## Summary

Externally, development dependencies are currently structured as a flat
list of PEP 508-compatible requirements:

```toml
[tool.uv]
dev-dependencies = ["werkzeug"]
```

When locking, we lock all development dependencies; when syncing, users
can provide `--dev`.
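
When syncing, enabled groups simply contribute extra dependency edges on
top of each distribution's base dependencies. Below is a minimal sketch of
that selection, using plain-string stand-ins for the real lockfile types
and a hypothetical `deps_to_install` helper (the actual logic lives in
`Lock::to_resolution` in the diff further down):

```rust
use std::collections::BTreeMap;

/// Toy stand-in for a locked distribution.
struct Distribution {
    dependencies: Vec<String>,
    dev_dependencies: BTreeMap<String, Vec<String>>,
}

/// Collect the dependencies to install: the base dependencies, plus the
/// dependencies of every enabled group (e.g., `dev` when `--dev` is passed).
fn deps_to_install(dist: &Distribution, groups: &[String]) -> Vec<String> {
    let mut deps = dist.dependencies.clone();
    for group in groups {
        if let Some(group_deps) = dist.dev_dependencies.get(group) {
            deps.extend(group_deps.iter().cloned());
        }
    }
    deps
}

fn main() {
    let dist = Distribution {
        dependencies: vec!["iniconfig".to_string()],
        dev_dependencies: BTreeMap::from([(
            "dev".to_string(),
            vec!["typing-extensions".to_string()],
        )]),
    };
    // Without `--dev`, only the base dependencies are selected.
    assert_eq!(deps_to_install(&dist, &[]), vec!["iniconfig"]);
    // With `--dev`, the `dev` group's dependencies are included as well.
    assert_eq!(
        deps_to_install(&dist, &["dev".to_string()]),
        vec!["iniconfig", "typing-extensions"]
    );
}
```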

Internally, though, we model them as dependency groups, similar to
Poetry, PDM, and [PEP 735](https://peps.python.org/pep-0735). This
enables us to change out the user-facing frontend without changing the
internal implementation, once we've decided how these should be exposed
to users.
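
Concretely, the lowered metadata carries a generic map of named groups
rather than a bespoke dev-dependencies field. A minimal sketch of that
shape, with plain strings standing in for the real `GroupName` and
`Requirement` types from the diff below:

```rust
use std::collections::BTreeMap;

// Stand-ins for `uv_normalize::GroupName` and `pypi_types::Requirement`.
type GroupName = String;
type Requirement = String;

/// Lowered package metadata, extended with named dependency groups.
struct Metadata {
    requires_dist: Vec<Requirement>,
    /// Dependency groups, keyed by group name. For now, only a single `dev`
    /// group is ever populated, sourced from `tool.uv.dev-dependencies`.
    dev_dependencies: BTreeMap<GroupName, Vec<Requirement>>,
}

fn main() {
    // `[tool.uv] dev-dependencies = ["werkzeug"]` lowers to a `dev` group.
    let metadata = Metadata {
        requires_dist: vec![],
        dev_dependencies: BTreeMap::from([("dev".to_string(), vec!["werkzeug".to_string()])]),
    };
    assert!(metadata.requires_dist.is_empty());
    assert!(metadata.dev_dependencies.contains_key("dev"));
}
```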

A few important decisions encoded in the implementation (which we can
change later):

1. Groups are enabled globally, for all dependencies. This differs from
extras, which are enabled on a per-requirement basis. Note, however,
that we'll only discover groups for uv-enabled packages anyway.
2. Installing a group requires installing the base package. We rely on
this in PubGrub to ensure that both resolve to the same version (even
though we only expect groups to come from workspace dependencies, which
are unique). In any case, that invariant is encoded in the resolver right
now, just as it is for extras, as sketched below.
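
To make (2) concrete: an enabled group is modeled as a proxy node whose
only dependencies are the base package and the group-enabled package, both
pinned to the same version, mirroring the existing `Extra` proxy. A
simplified sketch of that encoding, with a hypothetical
`dev_proxy_dependencies` helper (the real variant is
`PubGrubPackageInner::Dev`, added in the resolver changes below):

```rust
/// A simplified view of the resolver's package universe.
#[derive(Debug, Clone)]
enum Package {
    /// A package, optionally with a dependency group enabled.
    Base { name: String, dev: Option<String> },
    /// A proxy node representing an enabled dependency group.
    Dev { name: String, dev: String },
}

/// The dependencies a `Dev` proxy contributes: the base package and the
/// group-enabled package, both pinned to the proxy's exact version.
fn dev_proxy_dependencies(name: &str, dev: &str, version: &str) -> Vec<(Package, String)> {
    vec![
        (
            Package::Base { name: name.to_string(), dev: None },
            version.to_string(),
        ),
        (
            Package::Base { name: name.to_string(), dev: Some(dev.to_string()) },
            version.to_string(),
        ),
    ]
}

fn main() {
    // Enabling the `dev` group for `project` at 0.1.0 requires both the base
    // package and the group-enabled package at exactly 0.1.0, so they always
    // resolve to the same version.
    let proxy = Package::Dev { name: "project".to_string(), dev: "dev".to_string() };
    println!("resolving {proxy:?} at 0.1.0 requires:");
    for (package, version) in dev_proxy_dependencies("project", "dev", "0.1.0") {
        println!("  {package:?} == {version}");
    }
}
```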
Charlie Marsh · 2024-06-05 21:40:17 -04:00 · committed via GitHub
commit 0acae9bd9c (parent a81fb92ee6)
37 changed files with 642 additions and 79 deletions

Cargo.lock (generated)

@ -4664,6 +4664,7 @@ dependencies = [
"insta",
"install-wheel-rs",
"nanoid",
"once_cell",
"path-absolutize",
"pep440_rs",
"pep508_rs",


@ -1,7 +1,7 @@
use pypi_types::{Requirement, RequirementSource};
use std::collections::BTreeMap;
use uv_normalize::{ExtraName, PackageName};
use pypi_types::{Requirement, RequirementSource};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::{BuiltDist, Diagnostic, Dist, Name, ResolvedDist, SourceDist};
@ -75,6 +75,12 @@ pub enum ResolutionDiagnostic {
/// The extra that was requested. For example, `colorama` in `black[colorama]`.
extra: ExtraName,
},
MissingDev {
/// The distribution that was requested with a non-existent development dependency group.
dist: ResolvedDist,
/// The development dependency group that was requested.
dev: GroupName,
},
YankedVersion {
/// The package that was requested with a yanked version. For example, `black==23.10.0`.
dist: ResolvedDist,
@ -90,6 +96,9 @@ impl Diagnostic for ResolutionDiagnostic {
Self::MissingExtra { dist, extra } => {
format!("The package `{dist}` does not have an extra named `{extra}`.")
}
Self::MissingDev { dist, dev } => {
format!("The package `{dist}` does not have a development dependency group named `{dev}`.")
}
Self::YankedVersion { dist, reason } => {
if let Some(reason) = reason {
format!("`{dist}` is yanked (reason: \"{reason}\").")
@ -104,6 +113,7 @@ impl Diagnostic for ResolutionDiagnostic {
fn includes(&self, name: &PackageName) -> bool {
match self {
Self::MissingExtra { dist, .. } => name == dist.name(),
Self::MissingDev { dist, .. } => name == dist.name(),
Self::YankedVersion { dist, .. } => name == dist.name(),
}
}


@ -35,6 +35,7 @@ fs-err = { workspace = true }
futures = { workspace = true }
glob = { workspace = true }
nanoid = { workspace = true }
once_cell = { workspace = true }
path-absolutize = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }


@ -3,7 +3,7 @@ use std::path::PathBuf;
use tokio::task::JoinError;
use zip::result::ZipError;
use crate::metadata::MetadataLoweringError;
use crate::metadata::MetadataError;
use distribution_filename::WheelFilenameError;
use pep440_rs::Version;
use pypi_types::HashDigest;
@ -79,7 +79,7 @@ pub enum Error {
#[error("Unsupported scheme in URL: {0}")]
UnsupportedScheme(String),
#[error(transparent)]
MetadataLowering(#[from] MetadataLoweringError),
MetadataLowering(#[from] MetadataError),
/// A generic request middleware error happened while making a request.
/// Refer to the error message for more details.


@ -2,7 +2,7 @@ pub use distribution_database::{DistributionDatabase, HttpArchivePointer, LocalA
pub use download::LocalWheel;
pub use error::Error;
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
pub use metadata::{ArchiveMetadata, Metadata, RequiresDist};
pub use metadata::{ArchiveMetadata, Metadata, RequiresDist, DEV_DEPENDENCIES};
pub use reporter::Reporter;
pub use workspace::{ProjectWorkspace, Workspace, WorkspaceError, WorkspaceMember};


@ -1,21 +1,22 @@
use std::collections::BTreeMap;
use std::path::Path;
use thiserror::Error;
use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::{HashDigest, Metadata23};
pub use requires_dist::RequiresDist;
use uv_configuration::PreviewMode;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::metadata::lowering::LoweringError;
pub use crate::metadata::requires_dist::{RequiresDist, DEV_DEPENDENCIES};
use crate::WorkspaceError;
mod lowering;
mod requires_dist;
#[derive(Debug, Error)]
pub enum MetadataLoweringError {
pub enum MetadataError {
#[error(transparent)]
Workspace(#[from] WorkspaceError),
#[error("Failed to parse entry for: `{0}`")]
@ -31,6 +32,7 @@ pub struct Metadata {
pub requires_dist: Vec<pypi_types::Requirement>,
pub requires_python: Option<VersionSpecifiers>,
pub provides_extras: Vec<ExtraName>,
pub dev_dependencies: BTreeMap<GroupName, Vec<pypi_types::Requirement>>,
}
impl Metadata {
@ -47,6 +49,7 @@ impl Metadata {
.collect(),
requires_python: metadata.requires_python,
provides_extras: metadata.provides_extras,
dev_dependencies: BTreeMap::default(),
}
}
@ -56,12 +59,13 @@ impl Metadata {
metadata: Metadata23,
project_root: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
) -> Result<Self, MetadataError> {
// Lower the requirements.
let RequiresDist {
name,
requires_dist,
provides_extras,
dev_dependencies,
} = RequiresDist::from_workspace(
pypi_types::RequiresDist {
name: metadata.name,
@ -80,6 +84,7 @@ impl Metadata {
requires_dist,
requires_python: metadata.requires_python,
provides_extras,
dev_dependencies,
})
}
}


@ -1,18 +1,27 @@
use once_cell::sync::Lazy;
use std::collections::BTreeMap;
use std::path::Path;
use uv_configuration::PreviewMode;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::metadata::lowering::lower_requirement;
use crate::metadata::MetadataLoweringError;
use crate::metadata::MetadataError;
use crate::{Metadata, ProjectWorkspace};
/// The name of the global `dev-dependencies` group.
///
/// Internally, we model dependency groups as a generic concept; but externally, we only expose the
/// `dev-dependencies` group.
pub static DEV_DEPENDENCIES: Lazy<GroupName> =
Lazy::new(|| GroupName::new("dev".to_string()).unwrap());
#[derive(Debug, Clone)]
pub struct RequiresDist {
pub name: PackageName,
pub requires_dist: Vec<pypi_types::Requirement>,
pub provides_extras: Vec<ExtraName>,
pub dev_dependencies: BTreeMap<GroupName, Vec<pypi_types::Requirement>>,
}
impl RequiresDist {
@ -27,6 +36,7 @@ impl RequiresDist {
.map(pypi_types::Requirement::from)
.collect(),
provides_extras: metadata.provides_extras,
dev_dependencies: BTreeMap::default(),
}
}
@ -36,7 +46,7 @@ impl RequiresDist {
metadata: pypi_types::RequiresDist,
project_root: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
) -> Result<Self, MetadataError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
let Some(project_workspace) =
@ -52,7 +62,8 @@ impl RequiresDist {
metadata: pypi_types::RequiresDist,
project_workspace: &ProjectWorkspace,
preview_mode: PreviewMode,
) -> Result<Self, MetadataLoweringError> {
) -> Result<Self, MetadataError> {
// Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
let empty = BTreeMap::default();
let sources = project_workspace
.current_project()
@ -63,6 +74,37 @@ impl RequiresDist {
.and_then(|uv| uv.sources.as_ref())
.unwrap_or(&empty);
let dev_dependencies = {
let dev_dependencies = project_workspace
.current_project()
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.dev_dependencies.as_ref())
.into_iter()
.flatten()
.cloned()
.map(|requirement| {
let requirement_name = requirement.name.clone();
lower_requirement(
requirement,
&metadata.name,
project_workspace.project_root(),
sources,
project_workspace.workspace(),
preview_mode,
)
.map_err(|err| MetadataError::LoweringError(requirement_name.clone(), err))
})
.collect::<Result<Vec<_>, _>>()?;
if dev_dependencies.is_empty() {
BTreeMap::default()
} else {
BTreeMap::from([(DEV_DEPENDENCIES.clone(), dev_dependencies)])
}
};
let requires_dist = metadata
.requires_dist
.into_iter()
@ -76,13 +118,14 @@ impl RequiresDist {
project_workspace.workspace(),
preview_mode,
)
.map_err(|err| MetadataLoweringError::LoweringError(requirement_name.clone(), err))
.map_err(|err| MetadataError::LoweringError(requirement_name.clone(), err))
})
.collect::<Result<_, _>>()?;
Ok(Self {
name: metadata.name,
requires_dist,
dev_dependencies,
provides_extras: metadata.provides_extras,
})
}
@ -94,6 +137,7 @@ impl From<Metadata> for RequiresDist {
name: metadata.name,
requires_dist: metadata.requires_dist,
provides_extras: metadata.provides_extras,
dev_dependencies: metadata.dev_dependencies,
}
}
}


@ -10,10 +10,11 @@ use std::collections::BTreeMap;
use std::ops::Deref;
use glob::Pattern;
use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
use url::Url;
use pep440_rs::VersionSpecifiers;
use pypi_types::VerbatimParsedUrl;
use uv_normalize::{ExtraName, PackageName};
/// A `pyproject.toml` as specified in PEP 517.
@ -47,10 +48,19 @@ pub struct Tool {
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUv {
pub sources: Option<BTreeMap<PackageName, Source>>,
pub workspace: Option<ToolUvWorkspace>,
#[cfg_attr(
feature = "schemars",
schemars(
with = "Option<Vec<String>>",
description = "PEP 508-style requirements, e.g., `flask==3.0.0`, or `black @ https://...`."
)
)]
pub dev_dependencies: Option<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
@ -151,6 +161,7 @@ pub enum Source {
workspace: bool,
},
}
/// <https://github.com/serde-rs/serde/issues/1316#issue-332908452>
mod serde_from_and_to_string {
use std::fmt::Display;


@ -6,7 +6,7 @@ use serde::{Deserialize, Deserializer, Serialize};
use crate::{validate_and_normalize_owned, validate_and_normalize_ref, InvalidNameError};
/// The normalized name of an extra dependency group.
/// The normalized name of an extra dependency.
///
/// Converts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.
/// For example, `---`, `.`, and `__` are all converted to a single `-`.


@ -0,0 +1,53 @@
use std::fmt;
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize};
use crate::{validate_and_normalize_owned, validate_and_normalize_ref, InvalidNameError};
/// The normalized name of a dependency group.
///
/// See:
/// - <https://peps.python.org/pep-0735/>
/// - <https://packaging.python.org/en/latest/specifications/name-normalization/>
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct GroupName(String);
impl GroupName {
    /// Create a validated, normalized group name.
pub fn new(name: String) -> Result<Self, InvalidNameError> {
validate_and_normalize_owned(name).map(Self)
}
}
impl FromStr for GroupName {
type Err = InvalidNameError;
fn from_str(name: &str) -> Result<Self, Self::Err> {
validate_and_normalize_ref(name).map(Self)
}
}
impl<'de> Deserialize<'de> for GroupName {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Self::from_str(&s).map_err(serde::de::Error::custom)
}
}
impl Display for GroupName {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl AsRef<str> for GroupName {
fn as_ref(&self) -> &str {
&self.0
}
}


@ -2,9 +2,11 @@ use std::error::Error;
use std::fmt::{Display, Formatter};
pub use extra_name::ExtraName;
pub use group_name::GroupName;
pub use package_name::PackageName;
mod extra_name;
mod group_name;
mod package_name;
/// Validate and normalize an owned package or extra name.


@ -233,6 +233,7 @@ impl NoSolutionError {
PubGrubPackageInner::Root(_) => {}
PubGrubPackageInner::Python(_) => {}
PubGrubPackageInner::Extra { .. } => {}
PubGrubPackageInner::Dev { .. } => {}
PubGrubPackageInner::Package { name, .. } => {
// Avoid including available versions for packages that exist in the derivation
// tree, but were never visited during resolution. We _may_ have metadata for


@ -26,10 +26,10 @@ use platform_tags::{TagCompatibility, TagPriority, Tags};
use pypi_types::{HashDigest, ParsedArchiveUrl, ParsedGitUrl};
use uv_configuration::ExtrasSpecification;
use uv_git::{GitReference, GitSha, RepositoryReference, ResolvedRepositoryReference};
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::resolution::AnnotatedDist;
use crate::{lock, ResolutionGraph};
use crate::ResolutionGraph;
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(try_from = "LockWire")]
@ -60,33 +60,42 @@ impl Lock {
// Lock all base packages.
for node_index in graph.petgraph.node_indices() {
let dist = &graph.petgraph[node_index];
if dist.extra.is_some() {
continue;
}
let mut locked_dist = lock::Distribution::from_annotated_dist(dist)?;
for neighbor in graph.petgraph.neighbors(node_index) {
let dependency_dist = &graph.petgraph[neighbor];
locked_dist.add_dependency(dependency_dist);
}
if let Some(locked_dist) = locked_dists.insert(locked_dist.id.clone(), locked_dist) {
return Err(LockError::duplicate_distribution(locked_dist.id));
if dist.is_base() {
let mut locked_dist = Distribution::from_annotated_dist(dist)?;
for neighbor in graph.petgraph.neighbors(node_index) {
let dependency_dist = &graph.petgraph[neighbor];
locked_dist.add_dependency(dependency_dist);
}
let id = locked_dist.id.clone();
if let Some(locked_dist) = locked_dists.insert(id, locked_dist) {
return Err(LockError::duplicate_distribution(locked_dist.id));
}
}
}
// Lock all extras.
// Lock all extras and development dependencies.
for node_index in graph.petgraph.node_indices() {
let dist = &graph.petgraph[node_index];
if let Some(extra) = dist.extra.as_ref() {
let id = lock::DistributionId::from_annotated_dist(dist);
let id = DistributionId::from_annotated_dist(dist);
let Some(locked_dist) = locked_dists.get_mut(&id) else {
return Err(LockError::missing_base(id, extra.clone()));
return Err(LockError::missing_extra_base(id, extra.clone()));
};
for neighbor in graph.petgraph.neighbors(node_index) {
let dependency_dist = &graph.petgraph[neighbor];
locked_dist.add_optional_dependency(extra.clone(), dependency_dist);
}
}
if let Some(group) = dist.dev.as_ref() {
let id = DistributionId::from_annotated_dist(dist);
let Some(locked_dist) = locked_dists.get_mut(&id) else {
return Err(LockError::missing_dev_base(id, group.clone()));
};
for neighbor in graph.petgraph.neighbors(node_index) {
let dependency_dist = &graph.petgraph[neighbor];
locked_dist.add_dev_dependency(group.clone(), dependency_dist);
}
}
}
let distributions = locked_dists.into_values().collect();
@ -125,6 +134,7 @@ impl Lock {
tags: &Tags,
root_name: &PackageName,
extras: &ExtrasSpecification,
dev: &[GroupName],
) -> Resolution {
let mut queue: VecDeque<(&Distribution, Option<&ExtraName>)> = VecDeque::new();
@ -154,11 +164,17 @@ impl Lock {
let mut map = BTreeMap::default();
while let Some((dist, extra)) = queue.pop_front() {
let deps = if let Some(extra) = extra {
Either::Left(dist.optional_dependencies.get(extra).into_iter().flatten())
} else {
Either::Right(dist.dependencies.iter())
};
let deps =
if let Some(extra) = extra {
Either::Left(dist.optional_dependencies.get(extra).into_iter().flatten())
} else {
Either::Right(dist.dependencies.iter().chain(
dev.iter().flat_map(|group| {
dist.dev_dependencies.get(group).into_iter().flatten()
}),
))
};
for dep in deps {
let dep_dist = self.find_by_id(&dep.id);
if dep_dist
@ -272,6 +288,18 @@ impl Lock {
table.insert("optional-dependencies", Item::Table(optional_deps));
}
if !dist.dev_dependencies.is_empty() {
let mut dev_dependencies = Table::new();
for (extra, deps) in &dist.dev_dependencies {
let deps = deps
.iter()
.map(Dependency::to_toml)
.collect::<ArrayOfTables>();
dev_dependencies.insert(extra.as_ref(), Item::ArrayOfTables(deps));
}
table.insert("dev-dependencies", Item::Table(dev_dependencies));
}
if !dist.wheels.is_empty() {
let wheels = dist
.wheels
@ -371,6 +399,7 @@ impl TryFrom<LockWire> for Lock {
}
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Distribution {
#[serde(flatten)]
pub(crate) id: DistributionId,
@ -382,12 +411,10 @@ pub struct Distribution {
wheels: Vec<Wheel>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
dependencies: Vec<Dependency>,
#[serde(
default,
skip_serializing_if = "IndexMap::is_empty",
rename = "optional-dependencies"
)]
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
optional_dependencies: IndexMap<ExtraName, Vec<Dependency>>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
dev_dependencies: IndexMap<GroupName, Vec<Dependency>>,
}
impl Distribution {
@ -408,6 +435,7 @@ impl Distribution {
wheels,
dependencies: vec![],
optional_dependencies: IndexMap::default(),
dev_dependencies: IndexMap::default(),
})
}
@ -426,6 +454,12 @@ impl Distribution {
.push(dep);
}
/// Add the [`AnnotatedDist`] as a development dependency of the [`Distribution`].
fn add_dev_dependency(&mut self, dev: GroupName, annotated_dist: &AnnotatedDist) {
let dep = Dependency::from_annotated_dist(annotated_dist);
self.dev_dependencies.entry(dev).or_default().push(dep);
}
/// Convert the [`Distribution`] to a [`Dist`] that can be used in installation.
fn to_dist(&self, tags: &Tags) -> Dist {
if let Some(best_wheel_index) = self.find_best_wheel(tags) {
@ -1469,8 +1503,15 @@ impl LockError {
}
}
fn missing_base(id: DistributionId, extra: ExtraName) -> LockError {
let kind = LockErrorKind::MissingBase { id, extra };
fn missing_extra_base(id: DistributionId, extra: ExtraName) -> LockError {
let kind = LockErrorKind::MissingExtraBase { id, extra };
LockError {
kind: Box::new(kind),
}
}
fn missing_dev_base(id: DistributionId, group: GroupName) -> LockError {
let kind = LockErrorKind::MissingDevBase { id, group };
LockError {
kind: Box::new(kind),
}
@ -1485,7 +1526,8 @@ impl std::error::Error for LockError {
LockErrorKind::InvalidFileUrl { ref err } => Some(err),
LockErrorKind::UnrecognizedDependency { ref err } => Some(err),
LockErrorKind::Hash { .. } => None,
LockErrorKind::MissingBase { .. } => None,
LockErrorKind::MissingExtraBase { .. } => None,
LockErrorKind::MissingDevBase { .. } => None,
}
}
}
@ -1535,12 +1577,18 @@ impl std::fmt::Display for LockError {
source = id.source.kind.name(),
)
}
LockErrorKind::MissingBase { ref id, ref extra } => {
LockErrorKind::MissingExtraBase { ref id, ref extra } => {
write!(
f,
"found distribution `{id}` with extra `{extra}` but no base distribution",
)
}
LockErrorKind::MissingDevBase { ref id, ref group } => {
write!(
f,
"found distribution `{id}` with development dependency group `{group}` but no base distribution",
)
}
}
}
}
@ -1589,12 +1637,21 @@ enum LockErrorKind {
},
/// An error that occurs when a distribution is included with an extra name,
/// but no corresponding base distribution (i.e., without the extra) exists.
MissingBase {
MissingExtraBase {
/// The ID of the distribution that has a missing base.
id: DistributionId,
/// The extra name that was found.
extra: ExtraName,
},
/// An error that occurs when a distribution is included with a development
/// dependency group, but no corresponding base distribution (i.e., without
/// the group) exists.
MissingDevBase {
/// The ID of the distribution that has a missing base.
id: DistributionId,
/// The development dependency group that was found.
group: GroupName,
},
}
/// An error that occurs when there's an unrecognized dependency.


@ -3,7 +3,7 @@ use either::Either;
use pep508_rs::MarkerEnvironment;
use pypi_types::Requirement;
use uv_configuration::{Constraints, Overrides};
use uv_normalize::PackageName;
use uv_normalize::{GroupName, PackageName};
use uv_types::RequestedRequirements;
use crate::{preferences::Preference, DependencyMode, Exclusions};
@ -20,6 +20,10 @@ pub struct Manifest {
/// The overrides for the project.
pub(crate) overrides: Overrides,
/// The enabled development dependency groups for the project. Dependency groups are global,
/// such that any provided groups will be enabled for all requirements.
pub(crate) dev: Vec<GroupName>,
/// The preferences for the project.
///
/// These represent "preferred" versions of a given package. For example, they may be the
@ -50,6 +54,7 @@ impl Manifest {
requirements: Vec<Requirement>,
constraints: Constraints,
overrides: Overrides,
dev: Vec<GroupName>,
preferences: Vec<Preference>,
project: Option<PackageName>,
exclusions: Exclusions,
@ -59,6 +64,7 @@ impl Manifest {
requirements,
constraints,
overrides,
dev,
preferences,
project,
exclusions,
@ -71,6 +77,7 @@ impl Manifest {
requirements,
constraints: Constraints::default(),
overrides: Overrides::default(),
dev: Vec::new(),
preferences: Vec::new(),
project: None,
exclusions: Exclusions::default(),


@ -1,3 +1,6 @@
use std::collections::BTreeMap;
use either::Either;
use itertools::Itertools;
use pubgrub::range::Range;
use rustc_hash::FxHashSet;
@ -9,7 +12,7 @@ use pep508_rs::MarkerEnvironment;
use pypi_types::{Requirement, RequirementSource};
use uv_configuration::{Constraints, Overrides};
use uv_git::GitResolver;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::pubgrub::specifier::PubGrubSpecifier;
use crate::pubgrub::{PubGrubPackage, PubGrubPackageInner};
@ -24,10 +27,12 @@ impl PubGrubDependencies {
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_requirements(
requirements: &[Requirement],
dev_dependencies: &BTreeMap<GroupName, Vec<Requirement>>,
constraints: &Constraints,
overrides: &Overrides,
source_name: Option<&PackageName>,
source_extra: Option<&ExtraName>,
source_dev: Option<&GroupName>,
urls: &Urls,
locals: &Locals,
git: &GitResolver,
@ -38,10 +43,12 @@ impl PubGrubDependencies {
add_requirements(
requirements,
dev_dependencies,
constraints,
overrides,
source_name,
source_extra,
source_dev,
urls,
locals,
git,
@ -68,10 +75,12 @@ impl PubGrubDependencies {
#[allow(clippy::too_many_arguments)]
fn add_requirements(
requirements: &[Requirement],
dev_dependencies: &BTreeMap<GroupName, Vec<Requirement>>,
constraints: &Constraints,
overrides: &Overrides,
source_name: Option<&PackageName>,
source_extra: Option<&ExtraName>,
source_dev: Option<&GroupName>,
urls: &Urls,
locals: &Locals,
git: &GitResolver,
@ -80,7 +89,11 @@ fn add_requirements(
seen: &mut FxHashSet<ExtraName>,
) -> Result<(), ResolveError> {
// Iterate over all declared requirements.
for requirement in overrides.apply(requirements) {
for requirement in overrides.apply(if let Some(source_dev) = source_dev {
Either::Left(dev_dependencies.get(source_dev).into_iter().flatten())
} else {
Either::Right(requirements.iter())
}) {
// If the requirement isn't relevant for the current platform, skip it.
match source_extra {
Some(source_extra) => {
@ -128,10 +141,12 @@ fn add_requirements(
if seen.insert(extra.clone()) {
add_requirements(
requirements,
dev_dependencies,
constraints,
overrides,
source_name,
Some(extra),
None,
urls,
locals,
git,
@ -261,6 +276,7 @@ impl PubGrubRequirement {
package: PubGrubPackage::from(PubGrubPackageInner::Package {
name: requirement.name.clone(),
extra,
dev: None,
marker: requirement.marker.clone(),
url: Some(expected.clone()),
}),
@ -287,6 +303,7 @@ impl PubGrubRequirement {
package: PubGrubPackage::from(PubGrubPackageInner::Package {
name: requirement.name.clone(),
extra,
dev: None,
marker: requirement.marker.clone(),
url: Some(expected.clone()),
}),
@ -313,6 +330,7 @@ impl PubGrubRequirement {
package: PubGrubPackage::from(PubGrubPackageInner::Package {
name: requirement.name.clone(),
extra,
dev: None,
marker: requirement.marker.clone(),
url: Some(expected.clone()),
}),


@ -1,9 +1,10 @@
use pep508_rs::MarkerTree;
use pypi_types::VerbatimParsedUrl;
use std::fmt::{Display, Formatter};
use std::ops::Deref;
use std::sync::Arc;
use uv_normalize::{ExtraName, PackageName};
use pep508_rs::MarkerTree;
use pypi_types::VerbatimParsedUrl;
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::resolver::Urls;
@ -48,6 +49,7 @@ pub enum PubGrubPackageInner {
Package {
name: PackageName,
extra: Option<ExtraName>,
dev: Option<GroupName>,
marker: Option<MarkerTree>,
/// The URL of the package, if it was specified in the requirement.
///
@ -106,6 +108,17 @@ pub enum PubGrubPackageInner {
marker: Option<MarkerTree>,
url: Option<VerbatimParsedUrl>,
},
/// A proxy package to represent an enabled "dependency group" (e.g., development dependencies).
///
/// This is similar in spirit to [PEP 735](https://peps.python.org/pep-0735/) and similar in
/// implementation to the `Extra` variant. The main difference is that we treat groups as
/// enabled globally, rather than on a per-requirement basis.
Dev {
name: PackageName,
dev: GroupName,
marker: Option<MarkerTree>,
url: Option<VerbatimParsedUrl>,
},
}
impl PubGrubPackage {
@ -134,6 +147,7 @@ impl PubGrubPackage {
Self(Arc::new(PubGrubPackageInner::Package {
name,
extra,
dev: None,
marker,
url,
}))
@ -189,6 +203,7 @@ impl std::fmt::Display for PubGrubPackageInner {
write!(f, "{name}[{extra}]{{{marker}}}")
}
Self::Extra { name, extra, .. } => write!(f, "{name}[{extra}]"),
Self::Dev { name, dev, .. } => write!(f, "{name}:{dev}"),
}
}
}


@ -32,6 +32,9 @@ impl PubGrubPriorities {
PubGrubPackageInner::Extra {
name, url: None, ..
}
| PubGrubPackageInner::Dev {
name, url: None, ..
}
| PubGrubPackageInner::Package {
name, url: None, ..
} => {
@ -70,6 +73,9 @@ impl PubGrubPriorities {
PubGrubPackageInner::Extra {
name, url: Some(_), ..
}
| PubGrubPackageInner::Dev {
name, url: Some(_), ..
}
| PubGrubPackageInner::Package {
name, url: Some(_), ..
} => {
@ -106,6 +112,7 @@ impl PubGrubPriorities {
PubGrubPackageInner::Root(_) => Some(PubGrubPriority::Root),
PubGrubPackageInner::Python(_) => Some(PubGrubPriority::Root),
PubGrubPackageInner::Extra { name, .. } => self.0.get(name).copied(),
PubGrubPackageInner::Dev { name, .. } => self.0.get(name).copied(),
PubGrubPackageInner::Package { name, .. } => self.0.get(name).copied(),
}
}


@ -13,7 +13,7 @@ use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers};
use pep508_rs::{MarkerEnvironment, MarkerTree};
use pypi_types::{ParsedUrlError, Yanked};
use uv_git::GitResolver;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::preferences::Preferences;
use crate::pubgrub::{PubGrubDistribution, PubGrubPackageInner};
@ -37,6 +37,13 @@ pub struct ResolutionGraph {
pub(crate) diagnostics: Vec<ResolutionDiagnostic>,
}
type NodeKey<'a> = (
&'a PackageName,
&'a Version,
Option<&'a ExtraName>,
Option<&'a GroupName>,
);
impl ResolutionGraph {
/// Create a new graph from the resolved PubGrub state.
#[allow(clippy::too_many_arguments)]
@ -70,11 +77,10 @@ impl ResolutionGraph {
// Add every package to the graph.
let mut petgraph: Graph<AnnotatedDist, Version, Directed> =
Graph::with_capacity(resolution.packages.len(), resolution.packages.len());
let mut inverse: FxHashMap<(&PackageName, &Version, &Option<ExtraName>), NodeIndex<u32>> =
FxHashMap::with_capacity_and_hasher(
resolution.packages.len(),
BuildHasherDefault::default(),
);
let mut inverse: FxHashMap<NodeKey, NodeIndex<u32>> = FxHashMap::with_capacity_and_hasher(
resolution.packages.len(),
BuildHasherDefault::default(),
);
let mut diagnostics = Vec::new();
for (package, versions) in &resolution.packages {
@ -83,6 +89,7 @@ impl ResolutionGraph {
PubGrubPackageInner::Package {
name,
extra,
dev,
marker: None,
url: None,
} => {
@ -167,6 +174,17 @@ impl ResolutionGraph {
});
}
}
// Validate the development dependency group.
if let Some(dev) = dev {
if !metadata.dev_dependencies.contains_key(dev) {
diagnostics.push(ResolutionDiagnostic::MissingDev {
dist: dist.clone(),
dev: dev.clone(),
});
}
}
// Extract the markers.
let marker = markers.get(&(name, version, extra)).cloned();
@ -174,16 +192,18 @@ impl ResolutionGraph {
let index = petgraph.add_node(AnnotatedDist {
dist,
extra: extra.clone(),
dev: dev.clone(),
marker,
hashes,
metadata,
});
inverse.insert((name, version, extra), index);
inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index);
}
PubGrubPackageInner::Package {
name,
extra,
dev,
marker: None,
url: Some(url),
} => {
@ -244,6 +264,17 @@ impl ResolutionGraph {
});
}
}
// Validate the development dependency group.
if let Some(dev) = dev {
if !metadata.dev_dependencies.contains_key(dev) {
diagnostics.push(ResolutionDiagnostic::MissingDev {
dist: dist.clone().into(),
dev: dev.clone(),
});
}
}
// Extract the markers.
let marker = markers.get(&(name, version, extra)).cloned();
@ -251,11 +282,12 @@ impl ResolutionGraph {
let index = petgraph.add_node(AnnotatedDist {
dist: dist.into(),
extra: extra.clone(),
dev: dev.clone(),
marker,
hashes,
metadata,
});
inverse.insert((name, version, extra), index);
inverse.insert((name, version, extra.as_ref(), dev.as_ref()), index);
}
_ => {}
@ -266,9 +298,18 @@ impl ResolutionGraph {
// Add every edge to the graph.
for (names, version_set) in resolution.dependencies {
for versions in version_set {
let from_index =
inverse[&(&names.from, &versions.from_version, &versions.from_extra)];
let to_index = inverse[&(&names.to, &versions.to_version, &versions.to_extra)];
let from_index = inverse[&(
&names.from,
&versions.from_version,
versions.from_extra.as_ref(),
versions.from_dev.as_ref(),
)];
let to_index = inverse[&(
&names.to,
&versions.to_version,
versions.to_extra.as_ref(),
versions.to_dev.as_ref(),
)];
petgraph.update_edge(from_index, to_index, versions.to_version.clone());
}
}
@ -293,7 +334,7 @@ impl ResolutionGraph {
self.petgraph
.node_indices()
.map(|index| &self.petgraph[index])
.filter(|dist| dist.extra.is_none())
.filter(|dist| dist.is_base())
.count()
}


@ -8,7 +8,7 @@ use distribution_types::{DistributionMetadata, Name, ResolvedDist, Verbatim, Ver
use pep508_rs::{split_scheme, MarkerTree, Scheme};
use pypi_types::HashDigest;
use uv_distribution::Metadata;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
pub use crate::resolution::display::{AnnotationStyle, DisplayResolutionGraph};
pub use crate::resolution::graph::ResolutionGraph;
@ -23,11 +23,20 @@ mod graph;
pub(crate) struct AnnotatedDist {
pub(crate) dist: ResolvedDist,
pub(crate) extra: Option<ExtraName>,
pub(crate) dev: Option<GroupName>,
pub(crate) marker: Option<MarkerTree>,
pub(crate) hashes: Vec<HashDigest>,
pub(crate) metadata: Metadata,
}
impl AnnotatedDist {
/// Returns `true` if the [`AnnotatedDist`] is a base package (i.e., not an extra or a
/// dependency group).
pub(crate) fn is_base(&self) -> bool {
self.extra.is_none() && self.dev.is_none()
}
}
impl Name for AnnotatedDist {
fn name(&self) -> &PackageName {
self.dist.name()


@ -56,6 +56,7 @@ impl BatchPrefetcher {
let PubGrubPackageInner::Package {
name,
extra: None,
dev: None,
marker: _marker,
url: None,
} = &**next


@ -1,6 +1,7 @@
//! Given a set of requirements, find a set of compatible packages.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fmt::{Display, Formatter};
use std::sync::Arc;
use std::thread;
@ -30,7 +31,7 @@ pub(crate) use urls::Urls;
use uv_configuration::{Constraints, Overrides};
use uv_distribution::{ArchiveMetadata, DistributionDatabase};
use uv_git::GitResolver;
use uv_normalize::{ExtraName, PackageName};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider};
use crate::candidate_selector::{CandidateDist, CandidateSelector};
@ -80,6 +81,7 @@ struct ResolverState<InstalledPackages: InstalledPackagesProvider> {
requirements: Vec<Requirement>,
constraints: Constraints,
overrides: Overrides,
dev: Vec<GroupName>,
preferences: Preferences,
git: GitResolver,
exclusions: Exclusions,
@ -190,6 +192,7 @@ impl<Provider: ResolverProvider, InstalledPackages: InstalledPackagesProvider>
requirements: manifest.requirements,
constraints: manifest.constraints,
overrides: manifest.overrides,
dev: manifest.dev,
preferences: Preferences::from_iter(manifest.preferences, markers),
exclusions: manifest.exclusions,
hasher: hasher.clone(),
@ -577,6 +580,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackageInner::Root(_) => {}
PubGrubPackageInner::Python(_) => {}
PubGrubPackageInner::Extra { .. } => {}
PubGrubPackageInner::Dev { .. } => {}
PubGrubPackageInner::Package {
name, url: None, ..
} => {
@ -622,6 +626,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
let PubGrubPackageInner::Package {
name,
extra: None,
dev: None,
marker: _marker,
url: None,
} = &**package
@ -662,6 +667,11 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
url: Some(url),
..
}
| PubGrubPackageInner::Dev {
name,
url: Some(url),
..
}
| PubGrubPackageInner::Package {
name,
url: Some(url),
@ -751,6 +761,9 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackageInner::Extra {
name, url: None, ..
}
| PubGrubPackageInner::Dev {
name, url: None, ..
}
| PubGrubPackageInner::Package {
name, url: None, ..
} => {
@ -870,9 +883,13 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
let name = match &**pkg {
// A root can never be a dependency of another package, and a `Python` pubgrub
// package is never returned by `get_dependencies`. So these cases never occur.
// TODO(charlie): This might be overly conservative for `Extra` and `Group`. If
// multiple groups are enabled, we shouldn't need to fork. Similarly, if multiple
// extras are enabled, we shouldn't need to fork.
PubGrubPackageInner::Root(_) | PubGrubPackageInner::Python(_) => unreachable!(),
PubGrubPackageInner::Package { ref name, .. }
| PubGrubPackageInner::Extra { ref name, .. } => name,
| PubGrubPackageInner::Extra { ref name, .. }
| PubGrubPackageInner::Dev { ref name, .. } => name,
};
by_grouping
.entry(name)
@ -918,10 +935,12 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
// Add the root requirements.
let dependencies = PubGrubDependencies::from_requirements(
&self.requirements,
&BTreeMap::default(),
&self.constraints,
&self.overrides,
None,
None,
None,
&self.urls,
&self.locals,
&self.git,
@ -955,6 +974,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackageInner::Package {
name,
extra,
dev,
marker,
url,
} => {
@ -967,6 +987,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: extra.clone(),
dev: dev.clone(),
marker: None,
url: url.clone(),
}),
@ -1070,10 +1091,12 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
let mut dependencies = PubGrubDependencies::from_requirements(
&metadata.requires_dist,
&metadata.dev_dependencies,
&self.constraints,
&self.overrides,
Some(name),
extra.as_ref(),
dev.as_ref(),
&self.urls,
&self.locals,
&self.git,
@ -1090,6 +1113,25 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
self.visit_package(dep_package, request_sink)?;
}
// If a package has metadata for an enabled dependency group,
// add a dependency from it to the same package with the group
// enabled.
if extra.is_none() && dev.is_none() {
for dev in &self.dev {
if metadata.dev_dependencies.contains_key(dev) {
dependencies.push(
PubGrubPackage::from(PubGrubPackageInner::Dev {
name: name.clone(),
dev: dev.clone(),
marker: marker.clone(),
url: url.clone(),
}),
Range::singleton(version.clone()),
);
}
}
}
// If a package has a marker, add a dependency from it to the
// same package without markers.
//
@ -1106,6 +1148,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: extra.clone(),
dev: dev.clone(),
marker: None,
url: url.clone(),
}),
@ -1127,6 +1170,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: None,
dev: None,
marker: marker.clone(),
url: url.clone(),
}),
@ -1136,6 +1180,36 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: Some(extra.clone()),
dev: None,
marker: marker.clone(),
url: url.clone(),
}),
Range::singleton(version.clone()),
),
])),
// Add a dependency on both the development dependency group and base package.
PubGrubPackageInner::Dev {
name,
dev,
marker,
url,
} => Ok(Dependencies::Available(vec![
(
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: None,
dev: None,
marker: marker.clone(),
url: url.clone(),
}),
Range::singleton(version.clone()),
),
(
PubGrubPackage::from(PubGrubPackageInner::Package {
name: name.clone(),
extra: None,
dev: Some(dev.clone()),
marker: marker.clone(),
url: url.clone(),
}),
@ -1371,6 +1445,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
PubGrubPackageInner::Root(_) => {}
PubGrubPackageInner::Python(_) => {}
PubGrubPackageInner::Extra { .. } => {}
PubGrubPackageInner::Dev { .. } => {}
PubGrubPackageInner::Package {
name,
url: Some(url),
@ -1462,6 +1537,7 @@ impl SolveState {
let PubGrubPackageInner::Package {
name: ref self_name,
extra: ref self_extra,
dev: ref self_dev,
..
} = &**self_package
else {
@ -1472,6 +1548,7 @@ impl SolveState {
PubGrubPackageInner::Package {
name: ref dependency_name,
extra: ref dependency_extra,
dev: ref dependency_dev,
..
} => {
if self_name == dependency_name {
@ -1484,8 +1561,10 @@ impl SolveState {
let versions = ResolutionDependencyVersions {
from_version: self_version.clone(),
from_extra: self_extra.clone(),
from_dev: self_dev.clone(),
to_version: dependency_version.clone(),
to_extra: dependency_extra.clone(),
to_dev: dependency_dev.clone(),
};
dependencies.entry(names).or_default().insert(versions);
}
@ -1505,8 +1584,33 @@ impl SolveState {
let versions = ResolutionDependencyVersions {
from_version: self_version.clone(),
from_extra: self_extra.clone(),
from_dev: self_dev.clone(),
to_version: dependency_version.clone(),
to_extra: Some(dependency_extra.clone()),
to_dev: None,
};
dependencies.entry(names).or_default().insert(versions);
}
PubGrubPackageInner::Dev {
name: ref dependency_name,
dev: ref dependency_dev,
..
} => {
if self_name == dependency_name {
continue;
}
let names = ResolutionDependencyNames {
from: self_name.clone(),
to: dependency_name.clone(),
};
let versions = ResolutionDependencyVersions {
from_version: self_version.clone(),
from_extra: self_extra.clone(),
from_dev: self_dev.clone(),
to_version: dependency_version.clone(),
to_extra: None,
to_dev: Some(dependency_dev.clone()),
};
dependencies.entry(names).or_default().insert(versions);
}
@ -1545,8 +1649,10 @@ pub(crate) struct ResolutionDependencyNames {
pub(crate) struct ResolutionDependencyVersions {
pub(crate) from_version: Version,
pub(crate) from_extra: Option<ExtraName>,
pub(crate) from_dev: Option<GroupName>,
pub(crate) to_version: Version,
pub(crate) to_extra: Option<ExtraName>,
pub(crate) to_dev: Option<GroupName>,
}
impl Resolution {


@ -71,6 +71,7 @@ Ok(
],
dependencies: [],
optional_dependencies: {},
dev_dependencies: {},
},
],
requires_python: None,


@ -311,6 +311,7 @@ async fn black_mypy_extensions() -> Result<()> {
pep508_rs::Requirement::from_str("mypy-extensions<0.4.4").unwrap(),
)]),
Overrides::default(),
Vec::default(),
vec![],
None,
Exclusions::default(),
@ -351,6 +352,7 @@ async fn black_mypy_extensions_extra() -> Result<()> {
pep508_rs::Requirement::from_str("mypy-extensions[extra]<0.4.4").unwrap(),
)]),
Overrides::default(),
Vec::default(),
vec![],
None,
Exclusions::default(),
@ -391,6 +393,7 @@ async fn black_flake8() -> Result<()> {
pep508_rs::Requirement::from_str("flake8<1").unwrap(),
)]),
Overrides::default(),
Vec::default(),
vec![],
None,
Exclusions::default(),
@ -485,6 +488,7 @@ async fn black_respect_preference() -> Result<()> {
)?)],
Constraints::default(),
Overrides::default(),
Vec::default(),
vec![Preference::simple(
PackageName::from_str("black")?,
Version::from_str("23.9.0")?,
@ -525,6 +529,7 @@ async fn black_ignore_preference() -> Result<()> {
)?)],
Constraints::default(),
Overrides::default(),
Vec::default(),
vec![Preference::simple(
PackageName::from_str("black")?,
Version::from_str("23.9.2")?,


@ -1665,6 +1665,13 @@ pub(crate) struct RunArgs {
#[arg(long, overrides_with("all_extras"), hide = true)]
pub(crate) no_all_extras: bool,
/// Include development dependencies.
#[arg(long, overrides_with("no_dev"))]
pub(crate) dev: bool,
#[arg(long, conflicts_with("offline"), overrides_with("dev"), hide = true)]
pub(crate) no_dev: bool,
/// The command to run.
pub(crate) target: Option<String>,
@ -1746,6 +1753,13 @@ pub(crate) struct SyncArgs {
#[arg(long, overrides_with("all_extras"), hide = true)]
pub(crate) no_all_extras: bool,
/// Include development dependencies.
#[arg(long, overrides_with("no_dev"))]
pub(crate) dev: bool,
#[arg(long, conflicts_with("offline"), overrides_with("dev"), hide = true)]
pub(crate) no_dev: bool,
/// Refresh all cached data.
#[arg(long, conflicts_with("offline"), overrides_with("no_refresh"))]
pub(crate) refresh: bool,


@ -463,6 +463,9 @@ pub(crate) async fn pip_compile(
let constraints = Constraints::from_requirements(constraints);
let overrides = Overrides::from_requirements(overrides);
// Ignore development dependencies.
let dev = Vec::default();
// Determine any lookahead requirements.
let lookaheads = match dependency_mode {
DependencyMode::Transitive => {
@ -486,6 +489,7 @@ pub(crate) async fn pip_compile(
requirements,
constraints,
overrides,
dev,
preferences,
project,
// Do not consider any installed packages during resolution.


@ -262,6 +262,9 @@ pub(crate) async fn pip_install(
let preferences = Vec::default();
let git = GitResolver::default();
// Ignore development dependencies.
let dev = Vec::default();
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
@ -340,6 +343,7 @@ pub(crate) async fn pip_install(
requirements,
constraints,
overrides,
dev,
source_trees,
project,
extras,


@ -30,7 +30,7 @@ use uv_distribution::DistributionDatabase;
use uv_fs::Simplified;
use uv_installer::{Downloader, Plan, Planner, SitePackages};
use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_normalize::PackageName;
use uv_normalize::{GroupName, PackageName};
use uv_requirements::{
LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
SourceTreeResolver,
@ -79,6 +79,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
requirements: Vec<UnresolvedRequirementSpecification>,
constraints: Vec<Requirement>,
overrides: Vec<UnresolvedRequirementSpecification>,
dev: Vec<GroupName>,
source_trees: Vec<PathBuf>,
mut project: Option<PackageName>,
extras: &ExtrasSpecification,
@ -216,6 +217,7 @@ pub(crate) async fn resolve<InstalledPackages: InstalledPackagesProvider>(
requirements,
constraints,
overrides,
dev,
preferences,
project,
exclusions,


@ -256,6 +256,9 @@ pub(crate) async fn pip_sync(
let preferences = Vec::default();
let git = GitResolver::default();
// Ignore development dependencies.
let dev = Vec::default();
// Create a build dispatch for resolution.
let resolve_dispatch = BuildDispatch::new(
&client,
@ -292,6 +295,7 @@ pub(crate) async fn pip_sync(
requirements,
constraints,
overrides,
dev,
source_trees,
project,
&extras,


@ -12,7 +12,7 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_distribution::{ProjectWorkspace, DEV_DEPENDENCIES};
use uv_git::GitResolver;
use uv_interpreter::PythonEnvironment;
use uv_requirements::upgrade::{read_lockfile, LockedRequirements};
@ -90,6 +90,8 @@ pub(super) async fn do_lock(
.collect::<Vec<_>>();
let constraints = vec![];
let overrides = vec![];
let dev = vec![DEV_DEPENDENCIES.clone()];
let source_trees = vec![];
let project_name = project.project_name().clone();
@ -171,6 +173,7 @@ pub(super) async fn do_lock(
requirements,
constraints,
overrides,
dev,
source_trees,
Some(project_name),
&extras,


@ -175,6 +175,7 @@ pub(crate) async fn update_environment(
let extras = ExtrasSpecification::default();
let flat_index = FlatIndex::default();
let git = GitResolver::default();
let dev = Vec::default();
let hasher = HashStrategy::default();
let in_flight = InFlight::default();
let index = InMemoryIndex::default();
@ -212,6 +213,7 @@ pub(crate) async fn update_environment(
spec.requirements,
spec.constraints,
spec.overrides,
dev,
spec.source_trees,
spec.project,
&extras,


@ -26,6 +26,7 @@ use crate::printer::Printer;
pub(crate) async fn run(
index_locations: IndexLocations,
extras: ExtrasSpecification,
dev: bool,
target: Option<String>,
mut args: Vec<OsString>,
requirements: Vec<RequirementsSource>,
@ -80,6 +81,7 @@ pub(crate) async fn run(
&lock,
&index_locations,
extras,
dev,
preview,
cache,
printer,


@ -9,7 +9,7 @@ use uv_configuration::{
SetupPyStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_distribution::{ProjectWorkspace, DEV_DEPENDENCIES};
use uv_git::GitResolver;
use uv_installer::SitePackages;
use uv_interpreter::PythonEnvironment;
@ -27,6 +27,7 @@ use crate::printer::Printer;
pub(crate) async fn sync(
index_locations: IndexLocations,
extras: ExtrasSpecification,
dev: bool,
preview: PreviewMode,
cache: &Cache,
printer: Printer,
@ -55,6 +56,7 @@ pub(crate) async fn sync(
&lock,
&index_locations,
extras,
dev,
preview,
cache,
printer,
@ -72,6 +74,7 @@ pub(super) async fn do_sync(
lock: &Lock,
index_locations: &IndexLocations,
extras: ExtrasSpecification,
dev: bool,
preview: PreviewMode,
cache: &Cache,
printer: Printer,
@ -86,11 +89,18 @@ pub(super) async fn do_sync(
}
}
// Include development dependencies, if requested.
let dev = if dev {
vec![DEV_DEPENDENCIES.clone()]
} else {
vec![]
};
let markers = venv.interpreter().markers();
let tags = venv.interpreter().tags()?;
// Read the lockfile.
let resolution = lock.to_resolution(markers, tags, project.project_name(), &extras);
let resolution = lock.to_resolution(markers, tags, project.project_name(), &extras, &dev);
// Initialize the registry client.
// TODO(zanieb): Support client options e.g. offline, tls, etc.


@ -582,6 +582,7 @@ async fn run() -> Result<ExitStatus> {
commands::run(
args.index_locations,
args.extras,
args.dev,
args.target,
args.args,
requirements,
@ -607,6 +608,7 @@ async fn run() -> Result<ExitStatus> {
commands::sync(
args.index_locations,
args.extras,
args.dev,
globals.preview,
&cache,
printer,


@ -113,6 +113,7 @@ impl CacheSettings {
pub(crate) struct RunSettings {
pub(crate) index_locations: IndexLocations,
pub(crate) extras: ExtrasSpecification,
pub(crate) dev: bool,
pub(crate) target: Option<String>,
pub(crate) args: Vec<OsString>,
pub(crate) with: Vec<String>,
@ -131,6 +132,8 @@ impl RunSettings {
extra,
all_extras,
no_all_extras,
dev,
no_dev,
target,
args,
with,
@ -140,7 +143,6 @@ impl RunSettings {
upgrade,
no_upgrade,
upgrade_package,
index_args,
python,
exclude_newer,
@ -168,6 +170,7 @@ impl RunSettings {
flag(all_extras, no_all_extras).unwrap_or_default(),
extra.unwrap_or_default(),
),
dev: flag(dev, no_dev).unwrap_or(false),
target,
args,
with,
@ -234,6 +237,7 @@ pub(crate) struct SyncSettings {
pub(crate) index_locations: IndexLocations,
pub(crate) refresh: Refresh,
pub(crate) extras: ExtrasSpecification,
pub(crate) dev: bool,
pub(crate) python: Option<String>,
}
@ -245,6 +249,8 @@ impl SyncSettings {
extra,
all_extras,
no_all_extras,
dev,
no_dev,
refresh,
no_refresh,
refresh_package,
@ -272,6 +278,7 @@ impl SyncSettings {
flag(all_extras, no_all_extras).unwrap_or_default(),
extra.unwrap_or_default(),
),
dev: flag(dev, no_dev).unwrap_or(false),
python,
}
}


@ -1483,3 +1483,107 @@ fn lock_requires_python() -> Result<()> {
Ok(())
}
/// Lock the development dependencies for a project.
#[test]
fn lock_dev() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig"]
[tool.uv]
dev-dependencies = ["typing-extensions"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: `uv lock` is experimental and may change without warning.
Resolved 3 packages in [TIME]
"###);
let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock"))?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r###"
version = 1
requires-python = ">=3.12"
[[distribution]]
name = "iniconfig"
version = "2.0.0"
source = "registry+https://pypi.org/simple"
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
wheels = [{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }]
[[distribution]]
name = "project"
version = "0.1.0"
source = "editable+file://[TEMP_DIR]/"
sdist = { url = "file://[TEMP_DIR]/" }
[[distribution.dependencies]]
name = "iniconfig"
version = "2.0.0"
source = "registry+https://pypi.org/simple"
[distribution.dev-dependencies]
[[distribution.dev-dependencies.dev]]
name = "typing-extensions"
version = "4.10.0"
source = "registry+https://pypi.org/simple"
[[distribution]]
name = "typing-extensions"
version = "4.10.0"
source = "registry+https://pypi.org/simple"
sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558 }
wheels = [{ url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926 }]
"###
);
});
// Install from the lockfile, excluding development dependencies.
uv_snapshot!(context.filters(), context.sync(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: `uv sync` is experimental and may change without warning.
Downloaded 2 packages in [TIME]
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/)
"###);
// Install from the lockfile, including development dependencies.
uv_snapshot!(context.filters(), context.sync().arg("--dev"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: `uv sync` is experimental and may change without warning.
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.10.0
"###);
Ok(())
}


@ -436,7 +436,7 @@ fn compile_constraint_extra() -> Result<()> {
Ok(())
}
/// Resolve a package from an optional dependency group in a `pyproject.toml` file.
/// Resolve a package from an optional extra in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
let context = TestContext::new("3.12");
@ -522,7 +522,7 @@ optional-dependencies."FrIeNdLy-._.-bArD" = [
Ok(())
}
/// Request an extra that does not exist as a dependency group in a `pyproject.toml` file.
/// Request an extra that does not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
let context = TestContext::new("3.12");
@ -823,7 +823,7 @@ dependencies = [
Ok(())
}
/// Request multiple extras that do not exist as a dependency group in a `pyproject.toml` file.
/// Request multiple extras that do not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> {
let context = TestContext::new("3.12");
@ -2200,7 +2200,7 @@ fn requirement_override_prerelease() -> Result<()> {
Ok(())
}
/// Resolve packages from all optional dependency groups in a `pyproject.toml` file.
/// Resolve packages from all extras in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_all_extras() -> Result<()> {
let context = TestContext::new("3.12");
@ -2303,7 +2303,7 @@ optional-dependencies.bar = [
Ok(())
}
/// Resolve packages from all optional dependency groups in a `pyproject.toml` file.
/// Resolve packages from all extras in a `pyproject.toml` file.
#[test]
fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
let context = TestContext::new("3.12");

uv.schema.json (generated)

@ -10,6 +10,16 @@
"null"
]
},
"dev-dependencies": {
"description": "PEP 508-style requirements, e.g., `flask==3.0.0`, or `black @ https://...`.",
"type": [
"array",
"null"
],
"items": {
"type": "string"
}
},
"native-tls": {
"type": [
"boolean",
@ -140,7 +150,7 @@
"pattern": "^\\d{4}-\\d{2}-\\d{2}(T\\d{2}:\\d{2}:\\d{2}(Z|[+-]\\d{2}:\\d{2}))?$"
},
"ExtraName": {
"description": "The normalized name of an extra dependency group.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`. For example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: - <https://peps.python.org/pep-0685/#specification/> - <https://packaging.python.org/en/latest/specifications/name-normalization/>",
"description": "The normalized name of an extra dependency.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`. For example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: - <https://peps.python.org/pep-0685/#specification/> - <https://packaging.python.org/en/latest/specifications/name-normalization/>",
"type": "string"
},
"FlatIndexLocation": {