Respect overrides in all direct-dependency iterators (#2742)

## Summary

We iterate over the project "requirements" directly in a variety of
places. However, it's not always the case that an input "requirement" on
its own will _actually_ be part of the resolution, since we support
"overrides".

Historically, then, overrides haven't worked as expected for _direct_
dependencies (and we have some tests that demonstrate the current,
"wrong" behavior). This is just a bug, but it's not really one that
comes up in practice, since it's rare to apply an override to your _own_
dependency.

However, we're now considering expanding the lookahead concept to
include local transitive dependencies. In that case, it becomes
increasingly important that overrides and constraints are handled consistently.

This PR updates every location in which we iterate over requirements
directly so that those call sites respect overrides (and constraints, where
necessary).
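
To make the intended semantics concrete, here is a minimal sketch of the layering that the PR applies wherever requirements are iterated. The `Req` struct and plain `HashMap`s below are hypothetical stand-ins rather than uv's `Requirement`, `Overrides`, and `Constraints` types: an override replaces a matching requirement outright, while a constraint is layered on top of whatever survives, including an overridden requirement.

```rust
use std::collections::HashMap;

#[derive(Clone, Debug)]
struct Req {
    name: String,
    specifier: String,
}

/// Sketch of the layering: overrides replace a requirement wholesale; constraints are
/// appended on top, even when the requirement they constrain was overridden.
fn apply(
    requirements: &[Req],
    overrides: &HashMap<String, Vec<Req>>,
    constraints: &HashMap<String, Vec<Req>>,
) -> Vec<Req> {
    requirements
        .iter()
        // Overrides: replace the requirement entirely if one is registered for its name.
        .flat_map(|req| match overrides.get(&req.name) {
            Some(replacements) => replacements.clone(),
            None => vec![req.clone()],
        })
        // Constraints: keep the (possibly overridden) requirement, then append any constraints.
        .flat_map(|req| {
            let mut layered = vec![req.clone()];
            layered.extend(constraints.get(&req.name).into_iter().flatten().cloned());
            layered
        })
        .collect()
}

fn main() {
    let requirements = vec![Req { name: "flask".into(), specifier: ">=2".into() }];
    let overrides = HashMap::from([(
        "flask".to_string(),
        vec![Req { name: "flask".into(), specifier: "==1.1.4".into() }],
    )]);
    let constraints = HashMap::from([(
        "flask".to_string(),
        vec![Req { name: "flask".into(), specifier: "<2.0".into() }],
    )]);
    // Yields flask==1.1.4 (the override) plus flask<2.0 (the constraint),
    // rather than the original flask>=2.
    println!("{:?}", apply(&requirements, &overrides, &constraints));
}
```

The diff below threads this composition, `constraints.apply(overrides.apply(...))`, through every call site that previously iterated `requirements` directly.
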
Charlie Marsh 2024-03-31 14:03:49 -04:00 committed by GitHub
parent f65f013066
commit c669542a9e
9 changed files with 345 additions and 191 deletions

@@ -5,11 +5,12 @@ use anyhow::Result;
use futures::stream::FuturesUnordered;
use futures::StreamExt;
use distribution_types::{BuildableSource, Dist};
use distribution_types::{BuildableSource, Dist, LocalEditable};
use pep508_rs::{MarkerEnvironment, Requirement, VersionOrUrl};
use pypi_types::Metadata23;
use uv_client::RegistryClient;
use uv_distribution::{Reporter, SourceDistCachedBuilder};
use uv_types::{BuildContext, RequestedRequirements};
use uv_types::{BuildContext, Constraints, Overrides, RequestedRequirements};
/// A resolver for resolving lookahead requirements from local dependencies.
///
@@ -22,18 +23,32 @@ use uv_types::{BuildContext, RequestedRequirements};
///
/// The lookahead resolver resolves requirements for local dependencies, so that the resolver can
/// treat them as first-party dependencies for the purpose of analyzing their specifiers.
pub struct LookaheadResolver {
/// The requirements for the project.
requirements: Vec<Requirement>,
pub struct LookaheadResolver<'a> {
/// The direct requirements for the project.
requirements: &'a [Requirement],
/// The constraints for the project.
constraints: &'a Constraints,
/// The overrides for the project.
overrides: &'a Overrides,
/// The editable requirements for the project.
editables: &'a [(LocalEditable, Metadata23)],
/// The reporter to use when building source distributions.
reporter: Option<Arc<dyn Reporter>>,
}
impl LookaheadResolver {
impl<'a> LookaheadResolver<'a> {
/// Instantiate a new [`LookaheadResolver`] for a given set of requirements.
pub fn new(requirements: Vec<Requirement>) -> Self {
pub fn new(
requirements: &'a [Requirement],
constraints: &'a Constraints,
overrides: &'a Overrides,
editables: &'a [(LocalEditable, Metadata23)],
) -> Self {
Self {
requirements,
constraints,
overrides,
editables,
reporter: None,
}
}
@@ -55,10 +70,22 @@ impl LookaheadResolver {
markers: &MarkerEnvironment,
client: &RegistryClient,
) -> Result<Vec<RequestedRequirements>> {
let mut queue = VecDeque::from(self.requirements.clone());
let mut results = Vec::new();
let mut futures = FuturesUnordered::new();
// Queue up the initial requirements.
let mut queue: VecDeque<Requirement> = self
.constraints
.apply(self.overrides.apply(self.requirements))
.filter(|requirement| requirement.evaluate_markers(markers, &[]))
.chain(self.editables.iter().flat_map(|(editable, metadata)| {
self.constraints
.apply(self.overrides.apply(&metadata.requires_dist))
.filter(|requirement| requirement.evaluate_markers(markers, &editable.extras))
}))
.cloned()
.collect();
while !queue.is_empty() || !futures.is_empty() {
while let Some(requirement) = queue.pop_front() {
futures.push(self.lookahead(requirement, context, client));
@@ -66,7 +93,10 @@ impl LookaheadResolver {
while let Some(result) = futures.next().await {
if let Some(lookahead) = result? {
for requirement in lookahead.requirements() {
for requirement in self
.constraints
.apply(self.overrides.apply(lookahead.requirements()))
{
if requirement.evaluate_markers(markers, lookahead.extras()) {
queue.push_back(requirement.clone());
}

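As a rough illustration of the lookahead traversal above (hypothetical, heavily simplified types; not uv's async resolver), the essential change is that overrides and constraints are applied both to the seed requirements and to each batch of requirements discovered from a local dependency's metadata, before anything is enqueued:

```rust
use std::collections::{HashMap, VecDeque};

/// Stand-in for the metadata we would fetch for a local dependency: its own
/// requirements, keyed by the dependency's name.
type LocalMetadata = HashMap<String, Vec<String>>;

fn lookahead(
    direct: &[String],
    local_metadata: &LocalMetadata,
    // Stand-in for override + constraint application.
    apply: impl Fn(&[String]) -> Vec<String>,
) -> Vec<String> {
    // Seed the queue with the direct requirements, overrides and constraints already applied.
    let mut queue: VecDeque<String> = apply(direct).into();
    let mut seen: Vec<String> = Vec::new();
    while let Some(requirement) = queue.pop_front() {
        if seen.contains(&requirement) {
            continue;
        }
        seen.push(requirement.clone());
        // Only local (path/editable) dependencies contribute transitive requirements here.
        if let Some(requires) = local_metadata.get(&requirement) {
            // Apply overrides and constraints again before enqueueing what was discovered.
            for req in apply(requires.as_slice()) {
                queue.push_back(req);
            }
        }
    }
    seen
}

fn main() {
    // `app` depends on the local `lib`, which depends on `anyio`; an override redirects `anyio`.
    let metadata = LocalMetadata::from([
        ("app".to_string(), vec!["lib".to_string()]),
        ("lib".to_string(), vec!["anyio".to_string()]),
    ]);
    let resolved = lookahead(&["app".to_string()], &metadata, |reqs| {
        reqs.iter()
            .map(|r| if r == "anyio" { "anyio==3.7.0".to_string() } else { r.clone() })
            .collect::<Vec<String>>()
    });
    println!("{resolved:?}"); // ["app", "lib", "anyio==3.7.0"]
}
```
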
@@ -100,20 +100,18 @@ impl Manifest {
self.lookaheads
.iter()
.flat_map(|lookahead| {
lookahead
.requirements()
.iter()
self.overrides
.apply(lookahead.requirements())
.filter(|requirement| requirement.evaluate_markers(markers, lookahead.extras()))
})
.chain(self.editables.iter().flat_map(|(editable, metadata)| {
metadata
.requires_dist
.iter()
self.overrides
.apply(&metadata.requires_dist)
.filter(|requirement| requirement.evaluate_markers(markers, &editable.extras))
}))
.chain(
self.requirements
.iter()
self.overrides
.apply(&self.requirements)
.filter(|requirement| requirement.evaluate_markers(markers, &[])),
)
.chain(
@@ -140,22 +138,31 @@ impl Manifest {
self.lookaheads
.iter()
.flat_map(|lookahead| {
lookahead
.requirements()
.iter()
self.overrides
.apply(lookahead.requirements())
.filter(|requirement| requirement.evaluate_markers(markers, lookahead.extras()))
})
.chain(self.editables.iter().flat_map(|(editable, metadata)| {
metadata
.requires_dist
.iter()
self.overrides
.apply(&metadata.requires_dist)
.filter(|requirement| requirement.evaluate_markers(markers, &editable.extras))
}))
.chain(
self.requirements
.iter()
self.overrides
.apply(&self.requirements)
.filter(|requirement| requirement.evaluate_markers(markers, &[])),
)
.map(|requirement| &requirement.name)
}
/// Apply the overrides and constraints to a set of requirements.
///
/// Constraints are always applied _on top_ of overrides, such that constraints are applied
/// even if a requirement is overridden.
pub fn apply<'a>(
&'a self,
requirements: impl IntoIterator<Item = &'a Requirement>,
) -> impl Iterator<Item = &Requirement> {
self.constraints.apply(self.overrides.apply(requirements))
}
}

@@ -421,19 +421,22 @@ impl ResolutionGraph {
.distributions
.get(&package_id)
.expect("every package in resolution graph has metadata");
for req in &md.requires_dist {
for req in manifest.apply(&md.requires_dist) {
let Some(ref marker_tree) = req.marker else {
continue;
};
add_marker_params_from_tree(marker_tree, &mut seen_marker_values);
}
}
let direct_reqs = manifest
.requirements
.iter()
.chain(manifest.constraints.requirements())
.chain(manifest.overrides.requirements());
for direct_req in direct_reqs {
// Ensure that we consider markers from direct dependencies.
let direct_reqs = manifest.requirements.iter().chain(
manifest
.editables
.iter()
.flat_map(|(_, metadata)| &metadata.requires_dist),
);
for direct_req in manifest.apply(direct_reqs) {
let Some(ref marker_tree) = direct_req.marker else {
continue;
};

@@ -23,61 +23,7 @@ impl Urls {
let mut required: FxHashMap<PackageName, VerbatimUrl> = FxHashMap::default();
let mut allowed: FxHashMap<VerbatimUrl, VerbatimUrl> = FxHashMap::default();
// Add any lookahead requirements. If there are any conflicts, return an error.
for lookahead in &manifest.lookaheads {
for requirement in lookahead
.requirements()
.iter()
.filter(|requirement| requirement.evaluate_markers(markers, lookahead.extras()))
{
if let Some(pep508_rs::VersionOrUrl::Url(url)) = &requirement.version_or_url {
if let Some(previous) = required.insert(requirement.name.clone(), url.clone()) {
if !is_equal(&previous, url) {
if is_precise(&previous, url) {
debug!("Assuming {url} is a precise variant of {previous}");
allowed.insert(url.clone(), previous);
} else {
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),
previous.verbatim().to_string(),
url.verbatim().to_string(),
));
}
}
}
}
}
}
// Add all direct requirements and constraints. If there are any conflicts, return an error.
for requirement in manifest
.requirements
.iter()
.chain(manifest.constraints.requirements())
.filter(|requirement| requirement.evaluate_markers(markers, &[]))
{
if let Some(pep508_rs::VersionOrUrl::Url(url)) = &requirement.version_or_url {
if let Some(previous) = required.insert(requirement.name.clone(), url.clone()) {
if is_equal(&previous, url) {
continue;
}
if is_precise(&previous, url) {
debug!("Assuming {url} is a precise variant of {previous}");
allowed.insert(url.clone(), previous);
continue;
}
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),
previous.verbatim().to_string(),
url.verbatim().to_string(),
));
}
}
}
// Add any editable requirements. If there are any conflicts, return an error.
// Add the editables themselves to the list of required URLs.
for (editable, metadata) in &manifest.editables {
if let Some(previous) = required.insert(metadata.name.clone(), editable.url.clone()) {
if !is_equal(&previous, &editable.url) {
@@ -96,40 +42,28 @@
}
}
}
for requirement in metadata
.requires_dist
.iter()
.filter(|requirement| requirement.evaluate_markers(markers, &editable.extras))
{
if let Some(pep508_rs::VersionOrUrl::Url(url)) = &requirement.version_or_url {
if let Some(previous) = required.insert(requirement.name.clone(), url.clone()) {
if !is_equal(&previous, url) {
if is_precise(&previous, url) {
debug!("Assuming {url} is a precise variant of {previous}");
allowed.insert(url.clone(), previous);
} else {
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),
previous.verbatim().to_string(),
url.verbatim().to_string(),
));
}
}
}
}
}
}
// Add any overrides. Conflicts here are fine, as the overrides are meant to be
// authoritative.
for requirement in manifest
.overrides
.requirements()
.filter(|requirement| requirement.evaluate_markers(markers, &[]))
{
// Add all direct requirements and constraints. If there are any conflicts, return an error.
for requirement in manifest.requirements(markers) {
if let Some(pep508_rs::VersionOrUrl::Url(url)) = &requirement.version_or_url {
required.insert(requirement.name.clone(), url.clone());
if let Some(previous) = required.insert(requirement.name.clone(), url.clone()) {
if is_equal(&previous, url) {
continue;
}
if is_precise(&previous, url) {
debug!("Assuming {url} is a precise variant of {previous}");
allowed.insert(url.clone(), previous);
continue;
}
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),
previous.verbatim().to_string(),
url.verbatim().to_string(),
));
}
}
}

@@ -32,4 +32,18 @@ impl Constraints {
pub fn get(&self, name: &PackageName) -> Option<&Vec<Requirement>> {
self.0.get(name)
}
/// Apply the constraints to a set of requirements.
pub fn apply<'a>(
&'a self,
requirements: impl IntoIterator<Item = &'a Requirement>,
) -> impl Iterator<Item = &Requirement> {
requirements.into_iter().flat_map(|requirement| {
std::iter::once(requirement).chain(
self.get(&requirement.name)
.into_iter()
.flat_map(|constraints| constraints.iter()),
)
})
}
}

@@ -37,9 +37,9 @@ impl Overrides {
/// Apply the overrides to a set of requirements.
pub fn apply<'a>(
&'a self,
requirements: &'a [Requirement],
requirements: impl IntoIterator<Item = &'a Requirement>,
) -> impl Iterator<Item = &Requirement> {
requirements.iter().flat_map(|requirement| {
requirements.into_iter().flat_map(|requirement| {
if let Some(overrides) = self.get(&requirement.name) {
Either::Left(overrides.iter())
} else {

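The signature change above, from `&'a [Requirement]` to `impl IntoIterator<Item = &'a Requirement>`, is what lets the overrides pass feed straight into the constraints pass without collecting into an intermediate `Vec`. A rough sketch with stand-in types (the replacement and appending logic shown in the hunks above is elided here):

```rust
struct Requirement {
    name: String,
}

struct Overrides;
struct Constraints;

impl Overrides {
    fn apply<'a>(
        &'a self,
        requirements: impl IntoIterator<Item = &'a Requirement>,
    ) -> impl Iterator<Item = &'a Requirement> {
        // Replacement logic elided for the sketch.
        requirements.into_iter()
    }
}

impl Constraints {
    fn apply<'a>(
        &'a self,
        requirements: impl IntoIterator<Item = &'a Requirement>,
    ) -> impl Iterator<Item = &'a Requirement> {
        // Constraint-appending logic elided for the sketch.
        requirements.into_iter()
    }
}

/// Because each pass accepts any iterator of borrowed requirements, the output of the
/// overrides pass slots directly into the constraints pass, mirroring
/// `constraints.apply(overrides.apply(requirements))` in the diff.
fn apply_all<'a>(
    requirements: &'a [Requirement],
    overrides: &'a Overrides,
    constraints: &'a Constraints,
) -> impl Iterator<Item = &'a Requirement> {
    constraints.apply(overrides.apply(requirements))
}

fn main() {
    let requirements = vec![Requirement { name: "anyio".to_string() }];
    let (overrides, constraints) = (Overrides, Constraints);
    for requirement in apply_all(&requirements, &overrides, &constraints) {
        println!("{}", requirement.name);
    }
}
```
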
@@ -338,22 +338,10 @@ pub(crate) async fn pip_compile(
};
// Determine any lookahead requirements.
let lookaheads = LookaheadResolver::new(
requirements
.iter()
.filter(|requirement| requirement.evaluate_markers(&markers, &[]))
.chain(editables.iter().flat_map(|(editable, metadata)| {
metadata
.requires_dist
.iter()
.filter(|requirement| requirement.evaluate_markers(&markers, &editable.extras))
}))
.cloned()
.collect(),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(&build_dispatch, &markers, &client)
.await?;
let lookaheads = LookaheadResolver::new(&requirements, &constraints, &overrides, &editables)
.with_reporter(ResolverReporter::from(printer))
.resolve(&build_dispatch, &markers, &client)
.await?;
// Create a manifest of the requirements.
let manifest = Manifest::new(

@@ -539,22 +539,10 @@ async fn resolve(
.collect();
// Determine any lookahead requirements.
let lookaheads = LookaheadResolver::new(
requirements
.iter()
.filter(|requirement| requirement.evaluate_markers(markers, &[]))
.chain(editables.iter().flat_map(|(editable, metadata)| {
metadata
.requires_dist
.iter()
.filter(|requirement| requirement.evaluate_markers(markers, &editable.extras))
}))
.cloned()
.collect(),
)
.with_reporter(ResolverReporter::from(printer))
.resolve(build_dispatch, markers, client)
.await?;
let lookaheads = LookaheadResolver::new(&requirements, &constraints, &overrides, &editables)
.with_reporter(ResolverReporter::from(printer))
.resolve(build_dispatch, markers, client)
.await?;
// Create a manifest of the requirements.
let manifest = Manifest::new(

@@ -1841,8 +1841,8 @@ fn allowed_transitive_url_path_dependency() -> Result<()> {
Ok(())
}
/// A dependency with conflicting URLs in `requirements.in` and `constraints.txt` should arguably
/// be ignored if the dependency has an override. However, we currently error in this case.
/// A dependency with conflicting URLs in `requirements.in` and `constraints.txt` should be ignored
/// if the dependency has an override.
#[test]
fn requirement_constraint_override_url() -> Result<()> {
let context = TestContext::new("3.12");
@@ -1863,13 +1863,13 @@ fn requirement_constraint_override_url() -> Result<()> {
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 2
exit_code: 1
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `anyio`:
- https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz
- https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl
× No solution found when resolving dependencies:
Because there is no version of anyio==3.7.0 and you require
anyio==3.7.0, we can conclude that the requirements are unsatisfiable.
"###
);
@@ -1877,8 +1877,8 @@ fn requirement_constraint_override_url() -> Result<()> {
}
/// A dependency that uses a pre-release marker in `requirements.in` should be overridden by a
/// non-pre-release version in `overrides.txt`. We currently allow Flask to use a pre-release below,
/// but probably shouldn't.
/// non-pre-release version in `overrides.txt`. We should _not_ allow Flask to be resolved to
/// a pre-release version.
#[test]
fn requirement_override_prerelease() -> Result<()> {
let context = TestContext::new("3.12");
@@ -1898,18 +1898,16 @@ fn requirement_override_prerelease() -> Result<()> {
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
click==8.1.7
click==7.1.2
# via flask
flask==2.0.0rc2
itsdangerous==2.1.2
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==3.1.3
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
@@ -6573,13 +6571,13 @@ fn pendulum_no_tzdata_on_windows() -> Result<()> {
fn allow_recursive_url_local_path() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library.
let lib2 = context.temp_dir.child("lib2");
lib2.create_dir_all()?;
let pyproject_toml = lib2.child("pyproject.toml");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "lib2"
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
@@ -6589,19 +6587,19 @@ requires-python = ">3.8"
)?;
// Create a library that depends on the standalone library.
let lib1 = context.temp_dir.child("lib1");
lib1.create_dir_all()?;
let pyproject_toml = lib1.child("pyproject.toml");
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib1"
name = "lib"
version = "0.0.0"
dependencies = [
"lib2 @ {}"
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib2.path()).unwrap().as_str(),
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
@@ -6613,12 +6611,11 @@ requires-python = ">3.8"
name = "example"
version = "0.0.0"
dependencies = [
"anyio",
"lib1 @ {}"
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib1.path()).unwrap().as_str(),
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
@@ -6632,22 +6629,215 @@ requires-python = ">3.8"
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
# via example
anyio @ file://[TEMP_DIR]/anyio/
# via lib
example @ ./app
idna==3.6
# via
# anyio
# lib2
lib1 @ file://[TEMP_DIR]/lib1/
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect overrides.
#[test]
fn allow_recursive_url_local_path_override() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pulls `anyio` from PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==3.7.0")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
anyio==3.7.0
# via lib
example @ ./app
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
lib2 @ file://[TEMP_DIR]/lib2/
# via lib1
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 6 packages in [TIME]
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect both overrides _and_
/// constraints.
#[test]
fn allow_recursive_url_local_path_override_constraint() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pulls `anyio` from PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==0.0.0")?;
// Ensure that resolution fails, since `0.0.0` does not exist on PyPI.
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of anyio==0.0.0 and lib==0.0.0 depends on
anyio==0.0.0, we can conclude that lib==0.0.0 cannot be used.
And because only lib==0.0.0 is available and example==0.0.0 depends on
lib, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we
can conclude that the requirements are unsatisfiable.
"###
);
// Now constrain `anyio` to the local version.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ ./anyio")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt --constraint constraints.txt
anyio @ ./anyio
# via lib
example @ ./app
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);