Mirror of https://github.com/astral-sh/uv.git (synced 2025-08-03 10:33:49 +00:00)
Respect sources in overrides and constraints (#9455)
## Summary

We still only respect overrides and constraints in the workspace root -- which we may want to change -- but overrides and constraints are now correctly lowered.

Closes https://github.com/astral-sh/uv/issues/8148.
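For illustration, a minimal `pyproject.toml` adapted (and trimmed) from the `lock_project_with_override_sources` test added in this change, showing an override that is now lowered against `tool.uv.sources`:

```toml
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]

[tool.uv]
# The override itself; after this change it is lowered using `tool.uv.sources`.
override-dependencies = ["idna==3.2"]

[tool.uv.sources]
# The override for `idna` resolves against this URL source rather than the registry.
idna = { url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }
```

The same applies to `constraint-dependencies`, as exercised by the `lock_project_with_constraint_sources` test added below.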
parent 8c8a1f071c
commit 7169b2c427

11 changed files with 413 additions and 117 deletions
@@ -465,13 +465,12 @@ impl SourceBuild {
.or(package_name)
{
let build_requires = uv_pypi_types::BuildRequires {
name: name.clone(),
name: Some(name.clone()),
requires_dist: build_system.requires,
};
let build_requires = BuildRequires::from_project_maybe_workspace(
build_requires,
install_path,
None,
locations,
source_strategy,
LowerBound::Allow,
@@ -905,25 +904,20 @@ async fn create_pep517_build_environment(
// If necessary, lower the requirements.
let extra_requires = match source_strategy {
SourceStrategy::Enabled => {
if let Some(package_name) = package_name {
let build_requires = uv_pypi_types::BuildRequires {
name: package_name.clone(),
requires_dist: extra_requires,
};
let build_requires = BuildRequires::from_project_maybe_workspace(
build_requires,
install_path,
None,
locations,
source_strategy,
LowerBound::Allow,
)
.await
.map_err(Error::Lowering)?;
build_requires.requires_dist
} else {
extra_requires.into_iter().map(Requirement::from).collect()
}
let build_requires = uv_pypi_types::BuildRequires {
name: package_name.cloned(),
requires_dist: extra_requires,
};
let build_requires = BuildRequires::from_project_maybe_workspace(
build_requires,
install_path,
locations,
source_strategy,
LowerBound::Allow,
)
.await
.map_err(Error::Lowering)?;
build_requires.requires_dist
}
SourceStrategy::Disabled => extra_requires.into_iter().map(Requirement::from).collect(),
};

@@ -5,14 +5,14 @@ use uv_configuration::{LowerBound, SourceStrategy};
use uv_distribution_types::IndexLocations;
use uv_normalize::PackageName;
use uv_workspace::pyproject::ToolUvSources;
use uv_workspace::{DiscoveryOptions, ProjectWorkspace};
use uv_workspace::{DiscoveryOptions, ProjectWorkspace, Workspace};

use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
use crate::metadata::{LoweredRequirement, MetadataError};

/// Lowered requirements from a `[build-system.requires]` field in a `pyproject.toml` file.
#[derive(Debug, Clone)]
pub struct BuildRequires {
pub name: PackageName,
pub name: Option<PackageName>,
pub requires_dist: Vec<uv_pypi_types::Requirement>,
}

@@ -35,27 +35,14 @@ impl BuildRequires {
pub async fn from_project_maybe_workspace(
metadata: uv_pypi_types::BuildRequires,
install_path: &Path,
git_member: Option<&GitWorkspaceMember<'_>>,
locations: &IndexLocations,
sources: SourceStrategy,
lower_bound: LowerBound,
) -> Result<Self, MetadataError> {
// TODO(konsti): Cache workspace discovery.
let discovery_options = if let Some(git_member) = &git_member {
DiscoveryOptions {
stop_discovery_at: Some(
git_member
.fetch_root
.parent()
.expect("git checkout has a parent"),
),
..Default::default()
}
} else {
DiscoveryOptions::default()
};
let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(install_path, &discovery_options).await?
ProjectWorkspace::from_maybe_project_root(install_path, &DiscoveryOptions::default())
.await?
else {
return Ok(Self::from_metadata23(metadata));
};
@@ -63,7 +50,6 @@ impl BuildRequires {
Self::from_project_workspace(
metadata,
&project_workspace,
git_member,
locations,
sources,
lower_bound,
@@ -71,10 +57,9 @@ impl BuildRequires {
}

/// Lower the `build-system.requires` field from a `pyproject.toml` file.
fn from_project_workspace(
pub fn from_project_workspace(
metadata: uv_pypi_types::BuildRequires,
project_workspace: &ProjectWorkspace,
git_member: Option<&GitWorkspaceMember<'_>>,
locations: &IndexLocations,
source_strategy: SourceStrategy,
lower_bound: LowerBound,
@@ -118,7 +103,7 @@ impl BuildRequires {
let group = None;
LoweredRequirement::from_requirement(
requirement,
&metadata.name,
metadata.name.as_ref(),
project_workspace.project_root(),
project_sources,
project_indexes,
@@ -127,7 +112,84 @@ impl BuildRequires {
locations,
project_workspace.workspace(),
lower_bound,
git_member,
None,
)
.map(move |requirement| match requirement {
Ok(requirement) => Ok(requirement.into_inner()),
Err(err) => Err(MetadataError::LoweringError(
requirement_name.clone(),
Box::new(err),
)),
})
})
.collect::<Result<Vec<_>, _>>()?,
SourceStrategy::Disabled => requires_dist
.into_iter()
.map(uv_pypi_types::Requirement::from)
.collect(),
};

Ok(Self {
name: metadata.name,
requires_dist,
})
}

/// Lower the `build-system.requires` field from a `pyproject.toml` file.
pub fn from_workspace(
metadata: uv_pypi_types::BuildRequires,
workspace: &Workspace,
locations: &IndexLocations,
source_strategy: SourceStrategy,
lower_bound: LowerBound,
) -> Result<Self, MetadataError> {
// Collect any `tool.uv.index` entries.
let empty = vec![];
let project_indexes = match source_strategy {
SourceStrategy::Enabled => workspace
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.index.as_deref())
.unwrap_or(&empty),
SourceStrategy::Disabled => &empty,
};

// Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
let empty = BTreeMap::default();
let project_sources = match source_strategy {
SourceStrategy::Enabled => workspace
.pyproject_toml()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.map(ToolUvSources::inner)
.unwrap_or(&empty),
SourceStrategy::Disabled => &empty,
};

// Lower the requirements.
let requires_dist = metadata.requires_dist.into_iter();
let requires_dist = match source_strategy {
SourceStrategy::Enabled => requires_dist
.flat_map(|requirement| {
let requirement_name = requirement.name.clone();
let extra = requirement.marker.top_level_extra_name();
let group = None;
LoweredRequirement::from_requirement(
requirement,
None,
workspace.install_path(),
project_sources,
project_indexes,
extra.as_ref(),
group,
locations,
workspace,
lower_bound,
None,
)
.map(move |requirement| match requirement {
Ok(requirement) => Ok(requirement.into_inner()),

@@ -37,7 +37,7 @@ impl LoweredRequirement {
/// Combine `project.dependencies` or `project.optional-dependencies` with `tool.uv.sources`.
pub(crate) fn from_requirement<'data>(
requirement: uv_pep508::Requirement<VerbatimParsedUrl>,
project_name: &'data PackageName,
project_name: Option<&'data PackageName>,
project_dir: &'data Path,
project_sources: &'data BTreeMap<PackageName, Sources>,
project_indexes: &'data [Index],
@@ -89,7 +89,7 @@ impl LoweredRequirement {
}))
// ... except for recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`.
|| &requirement.name == project_name;
|| project_name.is_some_and(|project_name| *project_name == requirement.name);
if !workspace_package_declared {
return Either::Left(std::iter::once(Err(
LoweringError::UndeclaredWorkspacePackage,
@@ -102,7 +102,7 @@ impl LoweredRequirement {
// Support recursive editable inclusions.
if has_sources
&& requirement.version_or_url.is_none()
&& &requirement.name != project_name
&& !project_name.is_some_and(|project_name| *project_name == requirement.name)
{
warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`",
@@ -211,11 +211,15 @@ impl LoweredRequirement {
index,
));
};
let conflict = if let Some(extra) = extra {
Some(ConflictItem::from((project_name.clone(), extra)))
} else {
group.map(|group| ConflictItem::from((project_name.clone(), group)))
};
let conflict = project_name.and_then(|project_name| {
if let Some(extra) = extra {
Some(ConflictItem::from((project_name.clone(), extra)))
} else {
group.map(|group| {
ConflictItem::from((project_name.clone(), group))
})
}
});
let source = registry_source(
&requirement,
index.into_url(),

@@ -1,8 +1,6 @@
use std::collections::BTreeMap;
use std::path::Path;

use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
use crate::Metadata;
use uv_configuration::{LowerBound, SourceStrategy};
use uv_distribution_types::IndexLocations;
use uv_normalize::{ExtraName, GroupName, PackageName, DEV_DEPENDENCIES};
@@ -10,6 +8,9 @@ use uv_workspace::dependency_groups::FlatDependencyGroups;
use uv_workspace::pyproject::{Sources, ToolUvSources};
use uv_workspace::{DiscoveryOptions, ProjectWorkspace};

use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
use crate::Metadata;

#[derive(Debug, Clone)]
pub struct RequiresDist {
pub name: PackageName,
@@ -164,7 +165,7 @@ impl RequiresDist {
let extra = None;
LoweredRequirement::from_requirement(
requirement,
&metadata.name,
Some(&metadata.name),
project_workspace.project_root(),
project_sources,
project_indexes,
@@ -209,7 +210,7 @@ impl RequiresDist {
let group = None;
LoweredRequirement::from_requirement(
requirement,
&metadata.name,
Some(&metadata.name),
project_workspace.project_root(),
project_sources,
project_indexes,

@@ -8,6 +8,6 @@ use crate::VerbatimParsedUrl;
/// See: <https://peps.python.org/pep-0518/>
#[derive(Debug, Clone)]
pub struct BuildRequires {
pub name: PackageName,
pub name: Option<PackageName>,
pub requires_dist: Vec<Requirement<VerbatimParsedUrl>>,
}

@@ -86,6 +86,15 @@ impl Requirement {
let fragment = url.fragment()?;
Hashes::parse_fragment(fragment).ok()
}

/// Set the source file containing the requirement.
#[must_use]
pub fn with_origin(self, origin: RequirementOrigin) -> Self {
Self {
origin: Some(origin),
..self
}
}
}

impl From<Requirement> for uv_pep508::Requirement<VerbatimUrl> {

@@ -6,12 +6,13 @@ use std::path::{Path, PathBuf};
use glob::{glob, GlobError, PatternError};
use rustc_hash::FxHashSet;
use tracing::{debug, trace, warn};

use uv_distribution_types::Index;
use uv_fs::{Simplified, CWD};
use uv_normalize::{GroupName, PackageName, DEV_DEPENDENCIES};
use uv_pep508::{MarkerTree, RequirementOrigin, VerbatimUrl};
use uv_pypi_types::{Conflicts, Requirement, RequirementSource, SupportedEnvironments};
use uv_pep508::{MarkerTree, VerbatimUrl};
use uv_pypi_types::{
Conflicts, Requirement, RequirementSource, SupportedEnvironments, VerbatimParsedUrl,
};
use uv_static::EnvVars;
use uv_warnings::warn_user_once;

@@ -307,6 +308,24 @@ impl Workspace {
})
}

/// Returns the set of supported environments for the workspace.
pub fn environments(&self) -> Option<&SupportedEnvironments> {
self.pyproject_toml
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.environments.as_ref())
}

/// Returns the set of conflicts for the workspace.
pub fn conflicts(&self) -> Conflicts {
let mut conflicting = Conflicts::empty();
for member in self.packages.values() {
conflicting.append(&mut member.pyproject_toml.conflicts());
}
conflicting
}

/// Returns any requirements that are exclusive to the workspace root, i.e., not included in
/// any of the workspace members.
///
@@ -314,7 +333,9 @@ impl Workspace {
/// `pyproject.toml`.
///
/// Otherwise, returns an empty list.
pub fn non_project_requirements(&self) -> Result<Vec<Requirement>, DependencyGroupError> {
pub fn non_project_requirements(
&self,
) -> Result<Vec<uv_pep508::Requirement<VerbatimParsedUrl>>, DependencyGroupError> {
if self
.packages
.values()
@@ -350,16 +371,7 @@ impl Workspace {
let dev_dependencies = dependency_groups
.into_iter()
.flat_map(|(_, requirements)| requirements)
.map(|requirement| {
Requirement::from(requirement.with_origin(RequirementOrigin::Workspace))
})
.chain(dev_dependencies.into_iter().flatten().map(|requirement| {
Requirement::from(
requirement
.clone()
.with_origin(RequirementOrigin::Workspace),
)
}))
.chain(dev_dependencies.into_iter().flatten().cloned())
.collect();

Ok(dev_dependencies)
@@ -367,7 +379,7 @@ impl Workspace {
}

/// Returns the set of overrides for the workspace.
pub fn overrides(&self) -> Vec<Requirement> {
pub fn overrides(&self) -> Vec<uv_pep508::Requirement<VerbatimParsedUrl>> {
let Some(overrides) = self
.pyproject_toml
.tool
@@ -377,39 +389,11 @@ impl Workspace {
else {
return vec![];
};

overrides
.iter()
.map(|requirement| {
Requirement::from(
requirement
.clone()
.with_origin(RequirementOrigin::Workspace),
)
})
.collect()
}

/// Returns the set of supported environments for the workspace.
pub fn environments(&self) -> Option<&SupportedEnvironments> {
self.pyproject_toml
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.environments.as_ref())
}

/// Returns the set of conflicts for the workspace.
pub fn conflicts(&self) -> Conflicts {
let mut conflicting = Conflicts::empty();
for member in self.packages.values() {
conflicting.append(&mut member.pyproject_toml.conflicts());
}
conflicting
overrides.clone()
}

/// Returns the set of constraints for the workspace.
pub fn constraints(&self) -> Vec<Requirement> {
pub fn constraints(&self) -> Vec<uv_pep508::Requirement<VerbatimParsedUrl>> {
let Some(constraints) = self
.pyproject_toml
.tool
@@ -419,17 +403,7 @@ impl Workspace {
else {
return vec![];
};

constraints
.iter()
.map(|requirement| {
Requirement::from(
requirement
.clone()
.with_origin(RequirementOrigin::Workspace),
)
})
.collect()
constraints.clone()
}

/// Returns the set of all dependency group names defined in the workspace.

@@ -11,7 +11,8 @@ use tracing::debug;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
Concurrency, Constraints, ExtrasSpecification, LowerBound, Reinstall, TrustedHost, Upgrade,
Concurrency, Constraints, ExtrasSpecification, LowerBound, Reinstall, SourceStrategy,
TrustedHost, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
@@ -22,7 +23,8 @@ use uv_distribution_types::{
use uv_git::ResolvedRepositoryReference;
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_pypi_types::{Requirement, SupportedEnvironments};
use uv_pep508::RequirementOrigin;
use uv_pypi_types::{Requirement, SupportedEnvironments, VerbatimParsedUrl};
use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreference, PythonRequest};
use uv_requirements::upgrade::{read_lock_requirements, LockedRequirements};
use uv_requirements::ExtrasResolver;
@@ -323,11 +325,16 @@ async fn do_lock(

// Collect the requirements, etc.
let requirements = workspace.non_project_requirements()?;
let overrides = workspace.overrides().into_iter().collect::<Vec<_>>();
let overrides = workspace.overrides();
let constraints = workspace.constraints();
let dev = workspace.groups().into_iter().cloned().collect::<Vec<_>>();
let source_trees = vec![];

// If necessary, lower the overrides and constraints.
let requirements = lower(requirements, workspace, index_locations, sources)?;
let overrides = lower(overrides, workspace, index_locations, sources)?;
let constraints = lower(constraints, workspace, index_locations, sources)?;

// Collect the list of members.
let members = {
let mut members = workspace.packages().keys().cloned().collect::<Vec<_>>();
@@ -1116,3 +1123,36 @@ fn report_upgrades(

Ok(updated)
}

/// Lower a set of requirements, relative to the workspace root.
fn lower(
requirements: Vec<uv_pep508::Requirement<VerbatimParsedUrl>>,
workspace: &Workspace,
locations: &IndexLocations,
sources: SourceStrategy,
) -> Result<Vec<Requirement>, uv_distribution::MetadataError> {
let name = workspace
.pyproject_toml()
.project
.as_ref()
.map(|project| project.name.clone());

// We model these as `build-requires`, since, like build requirements, it doesn't define extras
// or dependency groups.
let metadata = uv_distribution::BuildRequires::from_workspace(
uv_pypi_types::BuildRequires {
name,
requires_dist: requirements,
},
workspace,
locations,
sources,
LowerBound::Warn,
)?;

Ok(metadata
.requires_dist
.into_iter()
.map(|requirement| requirement.with_origin(RequirementOrigin::Workspace))
.collect::<Vec<_>>())
}

@@ -196,6 +196,9 @@ pub(crate) enum ProjectError {
#[error(transparent)]
Requirements(#[from] uv_requirements::Error),

#[error(transparent)]
Metadata(#[from] uv_distribution::MetadataError),

#[error(transparent)]
PyprojectMut(#[from] uv_workspace::pyproject_mut::Error),

@@ -1369,6 +1369,7 @@ fn lock_project_extra() -> Result<()> {
Ok(())
}

/// Lock a project with `uv.tool.override-dependencies`.
#[test]
fn lock_project_with_overrides() -> Result<()> {
let context = TestContext::new("3.12");
@@ -1432,6 +1433,69 @@ fn lock_project_with_overrides() -> Result<()> {
Ok(())
}

/// Lock a project with `uv.tool.override-dependencies` that reference `tool.uv.sources`.
#[test]
fn lock_project_with_override_sources() -> Result<()> {
let context = TestContext::new("3.12");

let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]

[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"

[tool.uv]
override-dependencies = ["idna==3.2"]

[tool.uv.sources]
idna = { url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }
"#,
)?;

uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Resolved 4 packages in [TIME]
"###);

// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Resolved 4 packages in [TIME]
"###);

// Install the base dependencies from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Prepared 3 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==3.7.0
+ idna==3.2 (from https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl)
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ sniffio==1.3.1
"###);

Ok(())
}

/// Lock a project with `uv.tool.constraint-dependencies`.
#[test]
fn lock_project_with_constraints() -> Result<()> {
@@ -1492,6 +1556,69 @@ fn lock_project_with_constraints() -> Result<()> {
Ok(())
}

/// Lock a project with `uv.tool.constraint-dependencies` that reference `tool.uv.sources`.
#[test]
fn lock_project_with_constraint_sources() -> Result<()> {
let context = TestContext::new("3.12");

let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]

[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"

[tool.uv]
constraint-dependencies = ["idna<3.4"]

[tool.uv.sources]
idna = { url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }
"#,
)?;

uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Resolved 4 packages in [TIME]
"###);

// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Resolved 4 packages in [TIME]
"###);

// Install the base dependencies from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
Prepared 3 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==3.7.0
+ idna==3.2 (from https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl)
+ project==0.1.0 (from file://[TEMP_DIR]/)
+ sniffio==1.3.1
"###);

Ok(())
}

/// Lock a project with a dependency that has an extra.
#[test]
fn lock_dependency_extra() -> Result<()> {
@@ -14580,6 +14707,88 @@ fn lock_non_project_group() -> Result<()> {
Ok(())
}

/// Lock a (legacy) non-project workspace root with `tool.uv.sources`.
#[test]
fn lock_non_project_sources() -> Result<()> {
let context = TestContext::new("3.12");

let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[tool.uv.workspace]
members = []

[tool.uv]
dev-dependencies = ["idna"]

[tool.uv.sources]
idna = { url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }
"#,
)?;

uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved 1 package in [TIME]
"###);

let lock = context.read("uv.lock");

insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r###"
version = 1
requires-python = ">=3.12"

[options]
exclude-newer = "2024-03-25T00:00:00Z"

[manifest]
requirements = [{ name = "idna", url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }]

[[package]]
name = "idna"
version = "3.2"
source = { url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/77/ff688d1504cdc4db2a938e2b7b9adee5dd52e34efbd2431051efc9984de9/idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a" },
]
"###
);
});

// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved 1 package in [TIME]
"###);

// Re-run with `--offline`. We shouldn't need a network connection to validate an
// already-correct lockfile with immutable metadata.
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
Resolved 1 package in [TIME]
"###);

Ok(())
}

/// `coverage` defines a `toml` extra, but it doesn't enable any dependencies after Python 3.11.
#[test]
fn lock_dropped_dev_extra() -> Result<()> {

@@ -1999,7 +1999,7 @@ fn sync_group_legacy_non_project_member() -> Result<()> {
"child",
]
requirements = [
{ name = "child" },
{ name = "child", editable = "child" },
{ name = "typing-extensions", specifier = ">=4" },
]