mirror of
https://github.com/astral-sh/uv.git
synced 2025-07-07 13:25:00 +00:00
Add error when user requests extras that do not exist (#254)
Extends #253 Closes #241 Adds `extras` to `RequirementsSpecification` to track extras used to construct the requirements so we can throw an error when not all of the requested extras are used.
This commit is contained in:
parent
322532d6f9
commit
1ddb7d2827
7 changed files with 199 additions and 25 deletions
|
@ -3,7 +3,7 @@ use std::io::{stdout, BufWriter};
|
|||
use std::path::Path;
|
||||
use std::{env, fs};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{anyhow, Result};
|
||||
use colored::Colorize;
|
||||
use fs_err::File;
|
||||
use itertools::Itertools;
|
||||
|
@ -47,7 +47,24 @@ pub(crate) async fn pip_compile(
|
|||
let RequirementsSpecification {
|
||||
requirements,
|
||||
constraints,
|
||||
extras: used_extras,
|
||||
} = RequirementsSpecification::try_from_sources(requirements, constraints, &extras)?;
|
||||
|
||||
// Check that all provided extras are used
|
||||
let mut unused_extras = extras
|
||||
.iter()
|
||||
.filter(|extra| !used_extras.contains(extra))
|
||||
.collect::<Vec<_>>();
|
||||
if !unused_extras.is_empty() {
|
||||
unused_extras.sort_unstable();
|
||||
unused_extras.dedup();
|
||||
let s = if unused_extras.len() == 1 { "" } else { "s" };
|
||||
return Err(anyhow!(
|
||||
"Requested extra{s} not found: {}",
|
||||
unused_extras.iter().join(", ")
|
||||
));
|
||||
}
|
||||
|
||||
let preferences: Vec<Requirement> = output_file
|
||||
.filter(|_| upgrade_mode.is_prefer_pinned())
|
||||
.filter(|output_file| output_file.exists())
|
||||
|
|
|
@ -34,6 +34,7 @@ pub(crate) async fn pip_sync(
|
|||
let RequirementsSpecification {
|
||||
requirements,
|
||||
constraints: _,
|
||||
extras: _,
|
||||
} = RequirementsSpecification::try_from_sources(sources, &[], &[])?;
|
||||
|
||||
if requirements.is_empty() {
|
||||
|
|
|
@ -25,6 +25,7 @@ pub(crate) async fn pip_uninstall(
|
|||
let RequirementsSpecification {
|
||||
requirements,
|
||||
constraints: _,
|
||||
extras: _,
|
||||
} = RequirementsSpecification::try_from_sources(sources, &[], &[])?;
|
||||
|
||||
// Detect the current Python interpreter.
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
//! A standard interface for working with heterogeneous sources of requirements.
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
|
@ -42,6 +43,8 @@ pub(crate) struct RequirementsSpecification {
|
|||
pub(crate) requirements: Vec<Requirement>,
|
||||
/// The constraints for the project.
|
||||
pub(crate) constraints: Vec<Requirement>,
|
||||
/// The extras used to collect requirements.
|
||||
pub(crate) extras: HashSet<ExtraName>,
|
||||
}
|
||||
|
||||
impl RequirementsSpecification {
|
||||
|
@ -57,6 +60,7 @@ impl RequirementsSpecification {
|
|||
Self {
|
||||
requirements: vec![requirement],
|
||||
constraints: vec![],
|
||||
extras: HashSet::new(),
|
||||
}
|
||||
}
|
||||
RequirementsSource::RequirementsTxt(path) => {
|
||||
|
@ -68,39 +72,33 @@ impl RequirementsSpecification {
|
|||
.map(|entry| entry.requirement)
|
||||
.collect(),
|
||||
constraints: requirements_txt.constraints.into_iter().collect(),
|
||||
extras: HashSet::new(),
|
||||
}
|
||||
}
|
||||
RequirementsSource::PyprojectToml(path) => {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
|
||||
.with_context(|| format!("Failed to read `{}`", path.display()))?;
|
||||
let requirements: Vec<Requirement> = pyproject_toml
|
||||
.project
|
||||
.into_iter()
|
||||
.flat_map(|project| {
|
||||
project.dependencies.into_iter().flatten().chain(
|
||||
// Include any optional dependencies specified in `extras`
|
||||
project.optional_dependencies.into_iter().flat_map(
|
||||
|optional_dependencies| {
|
||||
optional_dependencies
|
||||
.iter()
|
||||
.flat_map(|(name, requirements)| {
|
||||
if extras.contains(&ExtraName::normalize(name)) {
|
||||
requirements.clone()
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Requirement>>()
|
||||
},
|
||||
),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
let mut used_extras = HashSet::new();
|
||||
let mut requirements = Vec::new();
|
||||
if let Some(project) = pyproject_toml.project {
|
||||
requirements.extend(project.dependencies.unwrap_or_default());
|
||||
// Include any optional dependencies specified in `extras`
|
||||
for (name, optional_requirements) in
|
||||
project.optional_dependencies.unwrap_or_default()
|
||||
{
|
||||
let normalized_name = ExtraName::normalize(name);
|
||||
if extras.contains(&normalized_name) {
|
||||
used_extras.insert(normalized_name);
|
||||
requirements.extend(optional_requirements);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
requirements,
|
||||
constraints: vec![],
|
||||
extras: used_extras,
|
||||
}
|
||||
}
|
||||
})
|
||||
|
@ -121,6 +119,7 @@ impl RequirementsSpecification {
|
|||
let source = Self::try_from_source(source, extras)?;
|
||||
spec.requirements.extend(source.requirements);
|
||||
spec.constraints.extend(source.constraints);
|
||||
spec.extras.extend(source.extras);
|
||||
}
|
||||
|
||||
// Read all constraints, treating both requirements _and_ constraints as constraints.
|
||||
|
|
|
@ -228,7 +228,7 @@ fn compile_constraints_inline() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Resolve a package from an extra in a `pyproject.toml` file.
|
||||
/// Resolve a package from an optional dependency group in a `pyproject.toml` file.
|
||||
#[test]
|
||||
fn compile_pyproject_toml_extra() -> Result<()> {
|
||||
let temp_dir = assert_fs::TempDir::new()?;
|
||||
|
@ -333,3 +333,113 @@ optional-dependencies."FrIeNdLy-._.-bArD" = [
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Request an extra that does not exist as a dependency group in a `pyproject.toml` file.
|
||||
#[test]
|
||||
fn compile_pyproject_toml_extra_missing() -> Result<()> {
|
||||
let temp_dir = assert_fs::TempDir::new()?;
|
||||
let cache_dir = assert_fs::TempDir::new()?;
|
||||
let venv = temp_dir.child(".venv");
|
||||
|
||||
Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("venv")
|
||||
.arg(venv.as_os_str())
|
||||
.arg("--cache-dir")
|
||||
.arg(cache_dir.path())
|
||||
.current_dir(&temp_dir)
|
||||
.assert()
|
||||
.success();
|
||||
venv.assert(predicates::path::is_dir());
|
||||
|
||||
let pyproject_toml = temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.touch()?;
|
||||
pyproject_toml.write_str(
|
||||
r#"[build-system]
|
||||
requires = ["setuptools", "wheel"]
|
||||
|
||||
[project]
|
||||
name = "project"
|
||||
dependencies = []
|
||||
optional-dependencies.foo = [
|
||||
"django==5.0b1",
|
||||
]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![
|
||||
(r"\d+(ms|s)", "[TIME]"),
|
||||
(r"# .* pip-compile", "# [BIN_PATH] pip-compile"),
|
||||
(r"--cache-dir .*", "--cache-dir [CACHE_DIR]"),
|
||||
]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("pip-compile")
|
||||
.arg("pyproject.toml")
|
||||
.arg("--extra")
|
||||
.arg("bar")
|
||||
.arg("--cache-dir")
|
||||
.arg(cache_dir.path())
|
||||
.env("VIRTUAL_ENV", venv.as_os_str())
|
||||
.current_dir(&temp_dir));
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Request multiple extras that do not exist as a dependency group in a `pyproject.toml` file.
|
||||
#[test]
|
||||
fn compile_pyproject_toml_extras_missing() -> Result<()> {
|
||||
let temp_dir = assert_fs::TempDir::new()?;
|
||||
let cache_dir = assert_fs::TempDir::new()?;
|
||||
let venv = temp_dir.child(".venv");
|
||||
|
||||
Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("venv")
|
||||
.arg(venv.as_os_str())
|
||||
.arg("--cache-dir")
|
||||
.arg(cache_dir.path())
|
||||
.current_dir(&temp_dir)
|
||||
.assert()
|
||||
.success();
|
||||
venv.assert(predicates::path::is_dir());
|
||||
|
||||
let pyproject_toml = temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.touch()?;
|
||||
pyproject_toml.write_str(
|
||||
r#"[build-system]
|
||||
requires = ["setuptools", "wheel"]
|
||||
|
||||
[project]
|
||||
name = "project"
|
||||
dependencies = []
|
||||
optional-dependencies.foo = [
|
||||
"django==5.0b1",
|
||||
]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
insta::with_settings!({
|
||||
filters => vec![
|
||||
(r"\d+(ms|s)", "[TIME]"),
|
||||
(r"# .* pip-compile", "# [BIN_PATH] pip-compile"),
|
||||
(r"--cache-dir .*", "--cache-dir [CACHE_DIR]"),
|
||||
]
|
||||
}, {
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.arg("pip-compile")
|
||||
.arg("pyproject.toml")
|
||||
.arg("--extra")
|
||||
.arg("foo")
|
||||
.arg("--extra")
|
||||
.arg("bar")
|
||||
.arg("--extra")
|
||||
.arg("foobar")
|
||||
.arg("--cache-dir")
|
||||
.arg(cache_dir.path())
|
||||
.env("VIRTUAL_ENV", venv.as_os_str())
|
||||
.current_dir(&temp_dir));
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
source: crates/puffin-cli/tests/pip_compile.rs
|
||||
info:
|
||||
program: puffin
|
||||
args:
|
||||
- pip-compile
|
||||
- pyproject.toml
|
||||
- "--extra"
|
||||
- bar
|
||||
- "--cache-dir"
|
||||
- /var/folders/bc/qlsk3t6x7c9fhhbvvcg68k9c0000gp/T/.tmpllq43n
|
||||
env:
|
||||
VIRTUAL_ENV: /var/folders/bc/qlsk3t6x7c9fhhbvvcg68k9c0000gp/T/.tmpdPZj3S/.venv
|
||||
---
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requested extra not found: bar
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
---
|
||||
source: crates/puffin-cli/tests/pip_compile.rs
|
||||
info:
|
||||
program: puffin
|
||||
args:
|
||||
- pip-compile
|
||||
- pyproject.toml
|
||||
- "--extra"
|
||||
- foo
|
||||
- "--extra"
|
||||
- bar
|
||||
- "--extra"
|
||||
- foobar
|
||||
- "--cache-dir"
|
||||
- /var/folders/bc/qlsk3t6x7c9fhhbvvcg68k9c0000gp/T/.tmp6Lgir2
|
||||
env:
|
||||
VIRTUAL_ENV: /var/folders/bc/qlsk3t6x7c9fhhbvvcg68k9c0000gp/T/.tmpHiL1wt/.venv
|
||||
---
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requested extras not found: bar, foobar
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue