Initial workspace support (#3705)

Add workspace support when using `-r <path>/pyproject.toml` or `-e
<path>` in the pip interface. It is limited to all-editable
static-metadata workspaces, and tests only include a single main
workspace, ignoring path dependencies in another workspace. This can be
considered the MVP for workspace support: You can create a workspace,
you can install from it, but some options and conveniences are still
missing. I'll file follow-up tickets (support in lockfiles, support path
dependencies in other workspaces, #3625).

There is also support in `uv run`, but we need
https://github.com/astral-sh/uv/issues/3700 first to properly support
using different current projects in the bluejay interface, currently the
resolution and therefore the lockfile depends on the current project.
I'd do this change first (it's big enough already), then #3700, and then
add workspace support properly to bluejay.

Fixes #3404
This commit is contained in:
konsti 2024-05-28 09:41:53 +02:00 committed by GitHub
parent 89cfecea77
commit a89e146107
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 952 additions and 210 deletions

3
Cargo.lock generated
View file

@ -4960,7 +4960,6 @@ dependencies = [
"indexmap",
"indoc",
"insta",
"itertools 0.13.0",
"path-absolutize",
"pep440_rs",
"pep508_rs",
@ -4968,9 +4967,11 @@ dependencies = [
"regex",
"requirements-txt",
"rustc-hash",
"same-file",
"schemars",
"serde",
"thiserror",
"tokio",
"toml",
"tracing",
"url",

View file

@ -70,15 +70,16 @@ impl VerbatimUrl {
#[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs.
pub fn parse_path(
path: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
base_dir: impl AsRef<Path>,
) -> Result<Self, VerbatimUrlError> {
debug_assert!(base_dir.as_ref().is_absolute(), "base dir must be absolute");
let path = path.as_ref();
// Convert the path to an absolute path, if necessary.
let path = if path.is_absolute() {
path.to_path_buf()
} else {
working_dir.as_ref().join(path)
base_dir.as_ref().join(path)
};
// Normalize the path.

View file

@ -35,9 +35,9 @@ fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
glob = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
path-absolutize = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
@ -52,6 +52,7 @@ schemars = ["dep:schemars"]
indoc = "2.0.5"
insta = { version = "1.38.0", features = ["filters", "redactions", "json"] }
regex = { workspace = true }
tokio = { workspace = true }
[lints]
workspace = true

View file

@ -4,6 +4,7 @@ use futures::stream::FuturesUnordered;
use futures::StreamExt;
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::trace;
use distribution_types::{
BuiltDist, Dist, DistributionMetadata, GitSourceDist, Requirement, RequirementSource,
@ -152,6 +153,7 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
&self,
requirement: Requirement,
) -> Result<Option<RequestedRequirements>, LookaheadError> {
trace!("Performing lookahead for {requirement}");
// Determine whether the requirement represents a local distribution and convert to a
// buildable distribution.
let dist = match requirement.source {

View file

@ -30,7 +30,7 @@ use uv_git::GitReference;
use uv_normalize::{ExtraName, PackageName};
use uv_warnings::warn_user_once;
use crate::ExtrasSpecification;
use crate::{ExtrasSpecification, Workspace};
#[derive(Debug, Error)]
pub enum Pep621Error {
@ -39,7 +39,7 @@ pub enum Pep621Error {
#[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")]
MissingProjectSection,
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
CantBeDynamic(&'static str),
DynamicNotAllowed(&'static str),
#[error("Failed to parse entry for: `{0}`")]
LoweringError(PackageName, #[source] LoweringError),
}
@ -68,14 +68,16 @@ pub enum LoweringError {
InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError),
#[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")]
ConflictingUrls,
#[error("Could not normalize path: `{0}`")]
AbsolutizeError(String, #[source] io::Error),
#[error("Could not normalize path: `{}`", _0.user_display())]
AbsolutizeError(PathBuf, #[source] io::Error),
#[error("Fragments are not allowed in URLs: `{0}`")]
ForbiddenFragment(Url),
#[error("`workspace = false` is not yet supported")]
WorkspaceFalse,
#[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")]
MissingPreview,
#[error("`editable = false` is not yet supported")]
NonEditableWorkspaceDependency,
}
/// A `pyproject.toml` as specified in PEP 517.
@ -241,12 +243,11 @@ impl Pep621Metadata {
///
/// Returns an error if the requirements are not valid PEP 508 requirements.
pub(crate) fn try_from(
pyproject: PyProjectToml,
pyproject: &PyProjectToml,
extras: &ExtrasSpecification,
pyproject_path: &Path,
project_dir: &Path,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Option<Self>, Pep621Error> {
let project_sources = pyproject
@ -255,9 +256,9 @@ impl Pep621Metadata {
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.clone());
let has_sources = project_sources.is_some() || !workspace_sources.is_empty();
let has_sources = project_sources.is_some() || !workspace.sources().is_empty();
let Some(project) = pyproject.project else {
let Some(project) = &pyproject.project else {
return if has_sources {
Err(Pep621Error::MissingProjectSection)
} else {
@ -268,7 +269,7 @@ impl Pep621Metadata {
// If the project specifies dynamic dependencies, we can't extract the requirements.
if dynamic.iter().any(|field| field == "dependencies") {
return if has_sources {
Err(Pep621Error::CantBeDynamic("project.dependencies"))
Err(Pep621Error::DynamicNotAllowed("project.dependencies"))
} else {
Ok(None)
};
@ -277,7 +278,9 @@ impl Pep621Metadata {
// extract the requirements.
if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") {
return if has_sources {
Err(Pep621Error::CantBeDynamic("project.optional-dependencies"))
Err(Pep621Error::DynamicNotAllowed(
"project.optional-dependencies",
))
} else {
Ok(None)
};
@ -285,14 +288,13 @@ impl Pep621Metadata {
}
let requirements = lower_requirements(
&project.dependencies.unwrap_or_default(),
&project.optional_dependencies.unwrap_or_default(),
project.dependencies.as_deref(),
project.optional_dependencies.as_ref(),
pyproject_path,
&project.name,
project_dir,
&project_sources.unwrap_or_default(),
workspace_sources,
workspace_packages,
workspace,
preview,
)?;
@ -316,7 +318,7 @@ impl Pep621Metadata {
}
Ok(Some(Self {
name: project.name,
name: project.name.clone(),
requirements: requirements_with_extras,
used_extras,
}))
@ -325,18 +327,18 @@ impl Pep621Metadata {
#[allow(clippy::too_many_arguments)]
pub(crate) fn lower_requirements(
dependencies: &[String],
optional_dependencies: &IndexMap<ExtraName, Vec<String>>,
dependencies: Option<&[String]>,
optional_dependencies: Option<&IndexMap<ExtraName, Vec<String>>>,
pyproject_path: &Path,
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirements, Pep621Error> {
let dependencies = dependencies
.iter()
.into_iter()
.flatten()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin(
RequirementOrigin::Project(pyproject_path.to_path_buf(), project_name.clone()),
@ -347,15 +349,15 @@ pub(crate) fn lower_requirements(
project_name,
project_dir,
project_sources,
workspace_sources,
workspace_packages,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
})
.collect::<Result<_, Pep621Error>>()?;
let optional_dependencies = optional_dependencies
.iter()
.into_iter()
.flatten()
.map(|(extra_name, dependencies)| {
let dependencies: Vec<_> = dependencies
.iter()
@ -372,8 +374,7 @@ pub(crate) fn lower_requirements(
project_name,
project_dir,
project_sources,
workspace_sources,
workspace_packages,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
@ -394,29 +395,35 @@ pub(crate) fn lower_requirement(
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirement, LoweringError> {
let source = project_sources
.get(&requirement.name)
.or(workspace_sources.get(&requirement.name))
.or(workspace.sources().get(&requirement.name))
.cloned();
if !matches!(
source,
Some(Source::Workspace {
// By using toml, we technically support `workspace = false`.
workspace: true,
..
})
) && workspace_packages.contains_key(&requirement.name)
{
let workspace_package_declared =
// We require that when you use a package that's part of the workspace, ...
!workspace.packages().contains_key(&requirement.name)
// ... it must be declared as a workspace dependency (`workspace = true`), ...
|| matches!(
source,
Some(Source::Workspace {
// By using toml, we technically support `workspace = false`.
workspace: true,
..
})
)
// ... except for recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`.
|| &requirement.name == project_name;
if !workspace_package_declared {
return Err(LoweringError::UndeclaredWorkspacePackage);
}
let Some(source) = source else {
let has_sources = !project_sources.is_empty() || !workspace_sources.is_empty();
let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty();
// Support recursive editable inclusions.
if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name
{
@ -523,20 +530,21 @@ pub(crate) fn lower_requirement(
Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls),
},
Source::Workspace {
workspace,
workspace: is_workspace,
editable,
} => {
if !workspace {
if !is_workspace {
return Err(LoweringError::WorkspaceFalse);
}
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let path = workspace_packages
let path = workspace
.packages()
.get(&requirement.name)
.ok_or(LoweringError::UndeclaredWorkspacePackage)?
.clone();
path_source(path, project_dir, editable.unwrap_or(true))?
path_source(path.root(), workspace.root(), editable.unwrap_or(true))?
}
Source::CatchAll { .. } => {
// Emit a dedicated error message, which is an improvement over Serde's default error.
@ -554,16 +562,22 @@ pub(crate) fn lower_requirement(
/// Convert a path string to a path section.
fn path_source(
path: String,
path: impl AsRef<Path>,
project_dir: &Path,
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(&path, project_dir)?.with_given(path.clone());
let path_buf = PathBuf::from(&path);
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::AbsolutizeError(path, err))?
.map_err(|err| LoweringError::AbsolutizeError(path.as_ref().to_path_buf(), err))?
.to_path_buf();
if !editable {
// TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// support `{ workspace = true, editable = false }` since we only collect editables.
return Err(LoweringError::NonEditableWorkspaceDependency);
}
Ok(RequirementSource::Path {
path: path_buf,
url,
@ -663,6 +677,7 @@ mod serde_from_and_to_string {
#[cfg(test)]
mod test {
use std::path::Path;
use std::str::FromStr;
use anyhow::Context;
use indoc::indoc;
@ -670,7 +685,9 @@ mod test {
use uv_configuration::PreviewMode;
use uv_fs::Simplified;
use uv_normalize::PackageName;
use crate::ProjectWorkspace;
use crate::{ExtrasSpecification, RequirementsSpecification};
fn from_source(
@ -679,13 +696,19 @@ mod test {
extras: &ExtrasSpecification,
) -> anyhow::Result<RequirementsSpecification> {
let path = uv_fs::absolutize_path(path.as_ref())?;
let project_workspace =
ProjectWorkspace::dummy(path.as_ref(), &PackageName::from_str("foo").unwrap());
let pyproject_toml =
toml::from_str(contents).context("Failed to parse: `pyproject.toml`")?;
RequirementsSpecification::parse_direct_pyproject_toml(
contents,
&pyproject_toml,
project_workspace.workspace(),
extras,
path.as_ref(),
PreviewMode::Enabled,
)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?
.context("Missing workspace")
}
fn format_err(input: &str) -> String {
@ -803,7 +826,6 @@ mod test {
"tqdm",
]
"#};
assert!(from_source(input, "pyproject.toml", &ExtrasSpecification::None).is_ok());
}

View file

@ -1,10 +1,41 @@
use std::collections::BTreeMap;
//! Collecting the requirements to compile, sync or install.
//!
//! # `requirements.txt` format
//!
//! The `requirements.txt` format (also known as `requirements.in`) is static except for the
//! possibility of making network requests.
//!
//! All entries are stored as `requirements` and `editables` or `constraints` depending on the kind
//! of inclusion (`uv pip install -r` and `uv pip compile` vs. `uv pip install -c` and
//! `uv pip compile -c`).
//!
//! # `pyproject.toml` and directory source.
//!
//! `pyproject.toml` files come in two forms: PEP 621 compliant with static dependencies and non-PEP 621
//! compliant or PEP 621 compliant with dynamic metadata. There are different ways in which the requirements are evaluated:
//! * `uv pip install -r pyproject.toml` or `uv pip compile requirements.in`: The `pyproject.toml`
//! must be valid (in other circumstances we allow invalid `dependencies` e.g. for hatch's
//! relative path support), but it can be dynamic. We set the `project` from the `name` entry. If it is static, we add
//! all `dependencies` from the pyproject.toml as `requirements` (and drop the directory). If it
//! is dynamic, we add the directory to `source_trees`.
//! * `uv pip install .` in a directory with `pyproject.toml` or `uv pip compile requirements.in`
//! where the `requirements.in` points to that directory: The directory is listed in
//! `requirements`. The lookahead resolver reads the static metadata from `pyproject.toml` if
//! available, otherwise it calls PEP 517 to resolve.
//! * `uv pip install -e`: We add the directory in `editables` instead of `requirements`. The
//! lookahead resolver resolves it the same.
//! * `setup.py` or `setup.cfg` instead of `pyproject.toml`: Directory is an entry in
//! `source_trees`.
use std::collections::VecDeque;
use std::iter;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use itertools::{Either, Itertools};
use path_absolutize::Absolutize;
use rustc_hash::FxHashSet;
use tracing::{debug, instrument};
use same_file::is_same_file;
use tracing::{debug, instrument, trace};
use cache_key::CanonicalUrl;
use distribution_types::{
@ -22,7 +53,8 @@ use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use crate::pyproject::{Pep621Metadata, PyProjectToml};
use crate::{ExtrasSpecification, RequirementsSource};
use crate::ProjectWorkspace;
use crate::{ExtrasSpecification, RequirementsSource, Workspace, WorkspaceError};
#[derive(Debug, Default)]
pub struct RequirementsSpecification {
@ -61,6 +93,7 @@ impl RequirementsSpecification {
source: &RequirementsSource,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
workspace: Option<&Workspace>,
preview: PreviewMode,
) -> Result<Self> {
Ok(match source {
@ -78,12 +111,7 @@ impl RequirementsSpecification {
}
}
RequirementsSource::Editable(name) => {
let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?)
.with_context(|| format!("Failed to parse: `{name}`"))?;
Self {
editables: vec![requirement],
..Self::default()
}
Self::from_editable_source(name, extras, workspace, preview).await?
}
RequirementsSource::RequirementsTxt(path) => {
let requirements_txt =
@ -121,9 +149,7 @@ impl RequirementsSpecification {
}
}
RequirementsSource::PyprojectToml(path) => {
let contents = uv_fs::read_to_string(&path).await?;
Self::parse_direct_pyproject_toml(&contents, extras, path.as_ref(), preview)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?
Self::from_pyproject_toml_source(path, extras, preview).await?
}
RequirementsSource::SetupPy(path) | RequirementsSource::SetupCfg(path) => Self {
source_trees: vec![path.clone()],
@ -145,19 +171,140 @@ impl RequirementsSpecification {
})
}
/// Attempt to read metadata from the `pyproject.toml` directly.
async fn from_editable_source(
name: &str,
extras: &ExtrasSpecification,
workspace: Option<&Workspace>,
preview: PreviewMode,
) -> Result<RequirementsSpecification> {
let requirement = EditableRequirement::parse(name, None, std::env::current_dir()?)
.with_context(|| format!("Failed to parse: `{name}`"))?;
// First try to find the project in the existing workspace (if any), then try workspace
// discovery.
let project_in_exiting_workspace = workspace.and_then(|workspace| {
// We use `is_same_file` instead of indexing by path to support different versions of
// the same path (e.g. symlinks).
workspace
.packages()
.values()
.find(|member| is_same_file(member.root(), &requirement.path).unwrap_or(false))
.map(|member| (member.pyproject_toml(), workspace))
});
let editable_spec = if let Some((pyproject_toml, workspace)) = project_in_exiting_workspace
{
Self::parse_direct_pyproject_toml(
pyproject_toml,
workspace,
extras,
requirement.path.as_ref(),
preview,
)
.with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))?
} else if let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(&requirement.path).await?
{
let pyproject_toml = project_workspace.current_project().pyproject_toml();
let workspace = project_workspace.workspace();
Self::parse_direct_pyproject_toml(
pyproject_toml,
workspace,
extras,
requirement.path.as_ref(),
preview,
)
.with_context(|| format!("Failed to parse: `{}`", requirement.path.user_display()))?
} else {
// No `pyproject.toml` or no static metadata also means no workspace support (at the
// moment).
debug!(
"pyproject.toml has dynamic metadata at: `{}`",
requirement.path.user_display()
);
return Ok(Self {
editables: vec![requirement],
..Self::default()
});
};
if let Some(editable_spec) = editable_spec {
// We only collect the editables here to keep the count of root packages
// correct.
// TODO(konsti): Collect all workspace packages, even the non-editable ones.
let editables = editable_spec
.editables
.into_iter()
.chain(iter::once(requirement))
.collect();
Ok(Self {
editables,
..Self::default()
})
} else {
debug!(
"pyproject.toml has dynamic metadata at: `{}`",
requirement.path.user_display()
);
Ok(Self {
editables: vec![requirement],
..Self::default()
})
}
}
async fn from_pyproject_toml_source(
path: &Path,
extras: &ExtrasSpecification,
preview: PreviewMode,
) -> Result<RequirementsSpecification> {
let dir = path.parent().context("pyproject.toml must have a parent")?;
// We have to handle three cases:
// * There is a workspace (possibly implicit) with static dependencies.
// * There are dynamic dependencies, we have to build and don't use workspace information if
// present.
// * There was an error during workspace discovery, such as an IO error or a
// `pyproject.toml` in the workspace not matching the (lenient) schema.
match ProjectWorkspace::from_project_root(dir).await {
Ok(project_workspace) => {
let static_pyproject_toml = Self::parse_direct_pyproject_toml(
project_workspace.current_project().pyproject_toml(),
project_workspace.workspace(),
extras,
path,
preview,
)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?;
// The workspace discovery succeeds even with dynamic metadata, in which case we
// fall back to building here.
let dynamic_pyproject_toml = Self {
source_trees: vec![path.to_path_buf()],
..Self::default()
};
Ok(static_pyproject_toml.unwrap_or(dynamic_pyproject_toml))
}
Err(WorkspaceError::MissingProject(_)) => {
// The dependencies are dynamic, we have to build to get the actual list.
debug!("Dynamic pyproject.toml at: `{}`", path.user_display());
Ok(Self {
source_trees: vec![path.to_path_buf()],
..Self::default()
})
}
Err(err) => Err(anyhow::Error::new(err)),
}
}
/// Parse and lower a `pyproject.toml`, including all editable workspace dependencies.
///
/// Since we only use this path for directly included pyproject.toml, we are strict about
/// PEP 621 and don't allow invalid `project.dependencies` (e.g., Hatch's relative path
/// support).
/// When the dependency information is dynamic, or `project.dependencies` is invalid (e.g.,
/// Hatch's relative path support), we return `None` and query the metadata with PEP 517 later.
pub(crate) fn parse_direct_pyproject_toml(
contents: &str,
pyproject: &PyProjectToml,
workspace: &Workspace,
extras: &ExtrasSpecification,
pyproject_path: &Path,
preview: PreviewMode,
) -> Result<Self> {
let pyproject = toml::from_str::<PyProjectToml>(contents)?;
) -> Result<Option<Self>> {
// We need to use this path as the base for the relative paths inside pyproject.toml, so
// we need the absolute path instead of a potentially relative path. E.g. with
// `foo = { path = "../foo" }`, we will join `../foo` onto this path.
@ -166,66 +313,127 @@ impl RequirementsSpecification {
.parent()
.context("`pyproject.toml` has no parent directory")?;
let workspace_sources = BTreeMap::default();
let workspace_packages = BTreeMap::default();
match Pep621Metadata::try_from(
let Some(project) = Pep621Metadata::try_from(
pyproject,
extras,
pyproject_path,
project_dir,
&workspace_sources,
&workspace_packages,
workspace,
preview,
) {
Ok(Some(project)) => {
// Partition into editable and non-editable requirements.
let (editables, requirements): (Vec<_>, Vec<_>) = project
)?
else {
debug!(
"Dynamic pyproject.toml at: `{}`",
pyproject_path.user_display()
);
return Ok(None);
};
if preview.is_disabled() {
Ok(Some(Self {
project: Some(project.name),
requirements: project
.requirements
.into_iter()
.partition_map(|requirement| {
if let RequirementSource::Path {
path,
editable: true,
url,
} = requirement.source
{
Either::Left(EditableRequirement {
url,
path,
marker: requirement.marker,
extras: requirement.extras,
origin: requirement.origin,
})
} else {
Either::Right(UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named(requirement),
hashes: vec![],
})
}
.map(|requirement| UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named(requirement),
hashes: vec![],
})
.collect(),
extras: project.used_extras,
..Self::default()
}))
} else {
Ok(Some(Self::collect_transitive_editables(
workspace, extras, preview, project,
)?))
}
}
/// Perform a workspace dependency DAG traversal (breadth-first search) to collect all editables
/// eagerly.
///
/// Consider a requirement on A in a workspace with workspace packages A, B, C where
/// A -> B and B -> C.
fn collect_transitive_editables(
workspace: &Workspace,
extras: &ExtrasSpecification,
preview: PreviewMode,
project: Pep621Metadata,
) -> Result<RequirementsSpecification> {
let mut seen_editables = FxHashSet::from_iter([project.name.clone()]);
let mut queue = VecDeque::from([project.name.clone()]);
let mut editables = Vec::new();
let mut requirements = Vec::new();
let mut used_extras = FxHashSet::default();
while let Some(project_name) = queue.pop_front() {
let Some(current) = &workspace.packages().get(&project_name) else {
continue;
};
trace!("Processing metadata for workspace package {project_name}");
let project_root_absolute = current.root().absolutize_from(workspace.root())?;
let pyproject = current.pyproject_toml().clone();
let project = Pep621Metadata::try_from(
&pyproject,
extras,
&project_root_absolute.join("pyproject.toml"),
project_root_absolute.as_ref(),
workspace,
preview,
)
.with_context(|| {
format!(
"Invalid requirements in: `{}`",
current.root().join("pyproject.toml").user_display()
)
})?
// TODO(konsti): We should support this by building and using the built PEP 517 metadata
.with_context(|| {
format!(
"Workspace member doesn't declare static metadata: `{}`",
current.root().user_display()
)
})?;
used_extras.extend(project.used_extras);
// Partition into editable and non-editable requirements.
for requirement in project.requirements {
if let RequirementSource::Path {
path,
editable: true,
url,
} = requirement.source
{
editables.push(EditableRequirement {
url,
path,
marker: requirement.marker,
extras: requirement.extras,
origin: requirement.origin,
});
Ok(Self {
project: Some(project.name),
editables,
requirements,
extras: project.used_extras,
..Self::default()
})
if seen_editables.insert(requirement.name.clone()) {
queue.push_back(requirement.name.clone());
}
} else {
requirements.push(UnresolvedRequirementSpecification {
requirement: UnresolvedRequirement::Named(requirement),
hashes: vec![],
});
}
}
Ok(None) => {
debug!(
"Dynamic pyproject.toml at: `{}`",
pyproject_path.user_display()
);
Ok(Self {
project: None,
requirements: vec![],
source_trees: vec![pyproject_path.to_path_buf()],
..Self::default()
})
}
Err(err) => Err(err.into()),
}
let spec = Self {
project: Some(project.name),
editables,
requirements,
extras: used_extras,
..Self::default()
};
Ok(spec)
}
/// Read the combined requirements and constraints from a set of sources.
@ -233,6 +441,8 @@ impl RequirementsSpecification {
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
// Avoid re-discovering the workspace if we already loaded it.
workspace: Option<&Workspace>,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
@ -243,7 +453,8 @@ impl RequirementsSpecification {
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
// a requirements file can also add constraints.
for source in requirements {
let source = Self::from_source(source, extras, client_builder, preview).await?;
let source =
Self::from_source(source, extras, client_builder, workspace, preview).await?;
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
@ -276,7 +487,8 @@ impl RequirementsSpecification {
// Read all constraints, treating both requirements _and_ constraints as constraints.
// Overrides are ignored, as are the hashes, as they are not relevant for constraints.
for source in constraints {
let source = Self::from_source(source, extras, client_builder, preview).await?;
let source =
Self::from_source(source, extras, client_builder, workspace, preview).await?;
for entry in source.requirements {
match entry.requirement {
UnresolvedRequirement::Named(requirement) => {
@ -311,7 +523,7 @@ impl RequirementsSpecification {
// Read all overrides, treating both requirements _and_ overrides as overrides.
// Constraints are ignored.
for source in overrides {
let source = Self::from_source(source, extras, client_builder, preview).await?;
let source = Self::from_source(source, extras, client_builder, None, preview).await?;
spec.overrides.extend(source.requirements);
spec.overrides.extend(source.overrides);
@ -345,6 +557,7 @@ impl RequirementsSpecification {
requirements,
&[],
&[],
None,
&ExtrasSpecification::None,
client_builder,
preview,

View file

@ -4,9 +4,10 @@ use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use glob::{glob, GlobError, PatternError};
use rustc_hash::FxHashSet;
use tracing::{debug, trace};
use uv_fs::Simplified;
use uv_fs::{absolutize_path, Simplified};
use uv_normalize::PackageName;
use uv_warnings::warn_user;
@ -25,8 +26,12 @@ pub enum WorkspaceError {
Io(#[from] std::io::Error),
#[error("Failed to parse: `{}`", _0.user_display())]
Toml(PathBuf, #[source] Box<toml::de::Error>),
#[error("No `project` section found in: `{}`", _0.simplified_display())]
#[error("No `project` table found in: `{}`", _0.simplified_display())]
MissingProject(PathBuf),
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
DynamicNotAllowed(&'static str),
#[error("Failed to normalize workspace member path")]
Normalize(#[source] std::io::Error),
}
/// A workspace, consisting of a root directory and members. See [`ProjectWorkspace`].
@ -172,22 +177,62 @@ pub struct ProjectWorkspace {
}
impl ProjectWorkspace {
/// Find the current project and workspace.
pub fn discover(path: impl AsRef<Path>) -> Result<Self, WorkspaceError> {
let Some(project_root) = path
/// Find the current project and workspace, given the current directory.
pub async fn discover(path: impl AsRef<Path>) -> Result<Self, WorkspaceError> {
let project_root = path
.as_ref()
.ancestors()
.find(|path| path.join("pyproject.toml").is_file())
else {
return Err(WorkspaceError::MissingPyprojectToml);
};
.ok_or(WorkspaceError::MissingPyprojectToml)?;
debug!(
"Found project root: `{}`",
project_root.simplified_display()
);
Self::from_project_root(project_root)
Self::from_project_root(project_root).await
}
/// Discover the workspace starting from the directory containing the `pyproject.toml`.
pub async fn from_project_root(project_root: &Path) -> Result<Self, WorkspaceError> {
// Read the current `pyproject.toml`.
let pyproject_path = project_root.join("pyproject.toml");
let contents = fs_err::tokio::read_to_string(&pyproject_path).await?;
let pyproject_toml: PyProjectToml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
// It must have a `[project]` table.
let project = pyproject_toml
.project
.clone()
.ok_or_else(|| WorkspaceError::MissingProject(pyproject_path.clone()))?;
Self::from_project(project_root, &pyproject_toml, project.name).await
}
/// If the current directory contains a `pyproject.toml` with a `project` table, discover the
/// workspace and return it, otherwise it is a dynamic path dependency and we return `Ok(None)`.
pub async fn from_maybe_project_root(
project_root: &Path,
) -> Result<Option<Self>, WorkspaceError> {
// Read the `pyproject.toml`.
let pyproject_path = project_root.join("pyproject.toml");
let Ok(contents) = fs_err::tokio::read_to_string(&pyproject_path).await else {
// No `pyproject.toml`, but there may still be a `setup.py` or `setup.cfg`.
return Ok(None);
};
let pyproject_toml: PyProjectToml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
// Extract the `[project]` metadata.
let Some(project) = pyproject_toml.project.clone() else {
// We have to build to get the metadata.
return Ok(None);
};
Ok(Some(
Self::from_project(project_root, &pyproject_toml, project.name).await?,
))
}
/// The directory containing the closest `pyproject.toml`, defining the current project.
@ -205,48 +250,31 @@ impl ProjectWorkspace {
&self.workspace
}
/// Return the requirements for the project.
pub fn requirements(&self) -> Vec<RequirementsSource> {
vec![
RequirementsSource::from_requirements_file(self.project_root.join("pyproject.toml")),
RequirementsSource::from_source_tree(self.project_root.clone()),
]
/// The current project.
pub fn current_project(&self) -> &WorkspaceMember {
&self.workspace().packages[&self.project_name]
}
fn from_project_root(path: &Path) -> Result<Self, WorkspaceError> {
let pyproject_path = path.join("pyproject.toml");
// Read the `pyproject.toml`.
let contents = fs_err::read_to_string(&pyproject_path)?;
let pyproject_toml: PyProjectToml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
// Extract the `[project]` metadata.
let Some(project) = pyproject_toml.project.clone() else {
return Err(WorkspaceError::MissingProject(pyproject_path));
};
Self::from_project(path.to_path_buf(), &pyproject_toml, project.name)
/// Return the requirements for the project, which is the current project as editable.
pub fn requirements(&self) -> Vec<RequirementsSource> {
    // `Cow::into_owned` converts the lossily-decoded path directly, avoiding the extra
    // `Display` round-trip that `.to_string()` on the `Cow` would perform.
    vec![RequirementsSource::Editable(
        self.project_root.to_string_lossy().into_owned(),
    )]
}
/// Find the workspace for a project.
fn from_project(
project_path: PathBuf,
async fn from_project(
project_path: &Path,
project: &PyProjectToml,
project_name: PackageName,
) -> Result<Self, WorkspaceError> {
let mut workspace = project
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.workspace.as_ref())
.map(|workspace| (project_path.clone(), workspace.clone(), project.clone()));
if workspace.is_none() {
workspace = find_workspace(&project_path)?;
}
let project_path = absolutize_path(project_path)
.map_err(WorkspaceError::Normalize)?
.to_path_buf();
let mut workspace_members = BTreeMap::new();
// The current project is always a workspace member, especially in a single project
// workspace.
workspace_members.insert(
project_name.clone(),
WorkspaceMember {
@ -255,9 +283,24 @@ impl ProjectWorkspace {
},
);
let Some((workspace_root, workspace_definition, project_in_workspace_root)) = workspace
// Check if the current project is also an explicit workspace root.
let mut workspace = project
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.workspace.as_ref())
.map(|workspace| (project_path.clone(), workspace.clone(), project.clone()));
if workspace.is_none() {
// The project isn't an explicit workspace root, check if we're a regular workspace
// member by looking for an explicit workspace root above.
workspace = find_workspace(&project_path).await?;
}
let Some((workspace_root, workspace_definition, workspace_pyproject_toml)) = workspace
else {
// The project and the workspace root are identical
// The project isn't an explicit workspace root, but there's also no workspace root
// above it, so the project is an implicit workspace root identical to the project root.
debug!("No workspace root found, using project root");
return Ok(Self {
project_root: project_path.clone(),
@ -282,7 +325,7 @@ impl ProjectWorkspace {
let pyproject_toml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path, Box::new(err)))?;
if let Some(project) = &project_in_workspace_root.project {
if let Some(project) = &workspace_pyproject_toml.project {
workspace_members.insert(
project.name.clone(),
WorkspaceMember {
@ -292,6 +335,7 @@ impl ProjectWorkspace {
);
};
}
let mut seen = FxHashSet::default();
for member_glob in workspace_definition.members.unwrap_or_default() {
let absolute_glob = workspace_root
.join(member_glob.as_str())
@ -300,10 +344,18 @@ impl ProjectWorkspace {
for member_root in glob(&absolute_glob)
.map_err(|err| WorkspaceError::Pattern(absolute_glob.to_string(), err))?
{
// TODO(konsti): Filter already seen.
let member_root = member_root
.map_err(|err| WorkspaceError::Glob(absolute_glob.to_string(), err))?;
// Read the `pyproject.toml`.
// Avoid reading the file more than once.
if !seen.insert(member_root.clone()) {
continue;
}
let member_root = absolutize_path(&member_root)
.map_err(WorkspaceError::Normalize)?
.to_path_buf();
trace!("Processing workspace member {}", member_root.user_display());
// Read the member `pyproject.toml`.
let pyproject_path = member_root.join("pyproject.toml");
let contents = fs_err::read_to_string(&pyproject_path)?;
let pyproject_toml: PyProjectToml = toml::from_str(&contents)
@ -314,10 +366,6 @@ impl ProjectWorkspace {
return Err(WorkspaceError::MissingProject(member_root));
};
let pyproject_toml = workspace_root.join("pyproject.toml");
let contents = fs_err::read_to_string(&pyproject_toml)?;
let pyproject_toml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_toml, Box::new(err)))?;
let member = WorkspaceMember {
root: member_root.clone(),
pyproject_toml,
@ -325,7 +373,7 @@ impl ProjectWorkspace {
workspace_members.insert(project.name, member);
}
}
let workspace_sources = project_in_workspace_root
let workspace_sources = workspace_pyproject_toml
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
@ -344,10 +392,37 @@ impl ProjectWorkspace {
},
})
}
#[cfg(test)]
pub(crate) fn dummy(root: &Path, project_name: &PackageName) -> Self {
    // Test-only constructor. The values don't necessarily match any particular test
    // fixture; the tests currently only look at the fields filled in below.
    let pyproject_toml = PyProjectToml {
        project: Some(crate::pyproject::Project {
            name: project_name.clone(),
            dependencies: None,
            optional_dependencies: None,
            dynamic: None,
        }),
        tool: None,
    };
    let member = WorkspaceMember {
        root: root.to_path_buf(),
        pyproject_toml,
    };
    let packages = std::iter::once((project_name.clone(), member)).collect();
    Self {
        project_root: root.to_path_buf(),
        project_name: project_name.clone(),
        workspace: Workspace {
            root: root.to_path_buf(),
            packages,
            sources: BTreeMap::default(),
        },
    }
}
}
/// Find the workspace root above the current project, if any.
fn find_workspace(
async fn find_workspace(
project_root: &Path,
) -> Result<Option<(PathBuf, ToolUvWorkspace, PyProjectToml)>, WorkspaceError> {
// Skip 1 to ignore the current project itself.
@ -362,7 +437,7 @@ fn find_workspace(
);
// Read the `pyproject.toml`.
let contents = fs_err::read_to_string(&pyproject_path)?;
let contents = fs_err::tokio::read_to_string(&pyproject_path).await?;
let pyproject_toml: PyProjectToml = toml::from_str(&contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
@ -521,7 +596,7 @@ mod tests {
use crate::workspace::ProjectWorkspace;
fn workspace_test(folder: impl AsRef<Path>) -> (ProjectWorkspace, String) {
async fn workspace_test(folder: impl AsRef<Path>) -> (ProjectWorkspace, String) {
let root_dir = env::current_dir()
.unwrap()
.parent()
@ -530,14 +605,17 @@ mod tests {
.unwrap()
.join("scripts")
.join("workspaces");
let project = ProjectWorkspace::discover(root_dir.join(folder)).unwrap();
let project = ProjectWorkspace::discover(root_dir.join(folder))
.await
.unwrap();
let root_escaped = regex::escape(root_dir.to_string_lossy().as_ref());
(project, root_escaped)
}
#[test]
fn albatross_in_example() {
let (project, root_escaped) = workspace_test("albatross-in-example/examples/bird-feeder");
#[tokio::test]
async fn albatross_in_example() {
let (project, root_escaped) =
workspace_test("albatross-in-example/examples/bird-feeder").await;
let filters = vec![(root_escaped.as_str(), "[ROOT]")];
insta::with_settings!({filters => filters}, {
assert_json_snapshot!(
@ -564,10 +642,10 @@ mod tests {
});
}
#[test]
fn albatross_project_in_excluded() {
#[tokio::test]
async fn albatross_project_in_excluded() {
let (project, root_escaped) =
workspace_test("albatross-project-in-excluded/excluded/bird-feeder");
workspace_test("albatross-project-in-excluded/excluded/bird-feeder").await;
let filters = vec![(root_escaped.as_str(), "[ROOT]")];
insta::with_settings!({filters => filters}, {
assert_json_snapshot!(
@ -594,9 +672,9 @@ mod tests {
});
}
#[test]
fn albatross_root_workspace() {
let (project, root_escaped) = workspace_test("albatross-root-workspace");
#[tokio::test]
async fn albatross_root_workspace() {
let (project, root_escaped) = workspace_test("albatross-root-workspace").await;
let filters = vec![(root_escaped.as_str(), "[ROOT]")];
insta::with_settings!({filters => filters}, {
assert_json_snapshot!(
@ -636,10 +714,10 @@ mod tests {
});
}
#[test]
fn albatross_virtual_workspace() {
#[tokio::test]
async fn albatross_virtual_workspace() {
let (project, root_escaped) =
workspace_test("albatross-virtual-workspace/packages/albatross");
workspace_test("albatross-virtual-workspace/packages/albatross").await;
let filters = vec![(root_escaped.as_str(), "[ROOT]")];
insta::with_settings!({filters => filters}, {
assert_json_snapshot!(
@ -674,9 +752,9 @@ mod tests {
});
}
#[test]
fn albatross_just_project() {
let (project, root_escaped) = workspace_test("albatross-just-project");
#[tokio::test]
async fn albatross_just_project() {
let (project, root_escaped) = workspace_test("albatross-just-project").await;
let filters = vec![(root_escaped.as_str(), "[ROOT]")];
insta::with_settings!({filters => filters}, {
assert_json_snapshot!(

View file

@ -136,6 +136,7 @@ pub(crate) async fn pip_compile(
requirements,
constraints,
overrides,
None,
&extras,
&client_builder,
preview,

View file

@ -102,6 +102,7 @@ pub(crate) async fn pip_install(
requirements,
constraints,
overrides,
None,
extras,
&client_builder,
preview,

View file

@ -31,7 +31,7 @@ use uv_interpreter::{Interpreter, PythonEnvironment};
use uv_normalize::PackageName;
use uv_requirements::{
ExtrasSpecification, LookaheadResolver, NamedRequirementsResolver, RequirementsSource,
RequirementsSpecification, SourceTreeResolver,
RequirementsSpecification, SourceTreeResolver, Workspace,
};
use uv_resolver::{
DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference,
@ -51,6 +51,7 @@ pub(crate) async fn read_requirements(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
workspace: Option<&Workspace>,
extras: &ExtrasSpecification,
client_builder: &BaseClientBuilder<'_>,
preview: PreviewMode,
@ -69,6 +70,7 @@ pub(crate) async fn read_requirements(
requirements,
constraints,
overrides,
workspace,
extras,
client_builder,
preview,

View file

@ -99,6 +99,7 @@ pub(crate) async fn pip_sync(
requirements,
constraints,
overrides,
None,
&ExtrasSpecification::default(),
&client_builder,
preview,

View file

@ -31,7 +31,7 @@ pub(crate) async fn lock(
}
// Find the project requirements.
let project = ProjectWorkspace::discover(std::env::current_dir()?)?;
let project = ProjectWorkspace::discover(std::env::current_dir()?).await?;
// Discover or create the virtual environment.
let venv = project::init_environment(&project, preview, cache, printer)?;
@ -43,9 +43,13 @@ pub(crate) async fn lock(
// TODO(zanieb): Consider allowing constraints and extras
// TODO(zanieb): Allow specifying extras somehow
let spec = RequirementsSpecification::from_sources(
// TODO(konsti): With workspace (just like with extras), these are the requirements for
// syncing. For locking, we want to use the entire workspace with all extras.
// See https://github.com/astral-sh/uv/issues/3700
&project.requirements(),
&[],
&[],
None,
&ExtrasSpecification::None,
&client_builder,
preview,

View file

@ -19,7 +19,7 @@ use uv_fs::Simplified;
use uv_installer::{SatisfiesResult, SitePackages};
use uv_interpreter::{find_default_interpreter, PythonEnvironment};
use uv_requirements::{
ExtrasSpecification, ProjectWorkspace, RequirementsSource, RequirementsSpecification,
ExtrasSpecification, ProjectWorkspace, RequirementsSource, RequirementsSpecification, Workspace,
};
use uv_resolver::{FlatIndex, InMemoryIndex, Options};
use uv_types::{BuildIsolation, HashStrategy, InFlight};
@ -92,6 +92,7 @@ pub(crate) fn init_environment(
pub(crate) async fn update_environment(
venv: PythonEnvironment,
requirements: &[RequirementsSource],
workspace: Option<&Workspace>,
preview: PreviewMode,
connectivity: Connectivity,
cache: &Cache,
@ -107,6 +108,7 @@ pub(crate) async fn update_environment(
requirements,
&[],
&[],
workspace,
&ExtrasSpecification::None,
&client_builder,
preview,

View file

@ -56,7 +56,7 @@ pub(crate) async fn run(
} else {
debug!("Syncing project environment.");
let project = ProjectWorkspace::discover(std::env::current_dir()?)?;
let project = ProjectWorkspace::discover(std::env::current_dir()?).await?;
let venv = project::init_environment(&project, preview, cache, printer)?;
@ -65,6 +65,7 @@ pub(crate) async fn run(
project::update_environment(
venv,
&project.requirements(),
Some(project.workspace()),
preview,
connectivity,
cache,
@ -111,8 +112,16 @@ pub(crate) async fn run(
// Install the ephemeral requirements.
Some(
project::update_environment(venv, &requirements, preview, connectivity, cache, printer)
.await?,
project::update_environment(
venv,
&requirements,
None,
preview,
connectivity,
cache,
printer,
)
.await?,
)
};

View file

@ -31,7 +31,7 @@ pub(crate) async fn sync(
}
// Find the project requirements.
let project = ProjectWorkspace::discover(std::env::current_dir()?)?;
let project = ProjectWorkspace::discover(std::env::current_dir()?).await?;
// Discover or create the virtual environment.
let venv = project::init_environment(&project, preview, cache, printer)?;

View file

@ -71,8 +71,18 @@ pub(crate) async fn run(
)?;
// Install the ephemeral requirements.
let ephemeral_env =
Some(update_environment(venv, &requirements, preview, connectivity, cache, printer).await?);
let ephemeral_env = Some(
update_environment(
venv,
&requirements,
None,
preview,
connectivity,
cache,
printer,
)
.await?,
);
// TODO(zanieb): Determine the command via the package entry points
let command = target;

View file

@ -236,6 +236,17 @@ impl TestContext {
.assert()
}
/// Run the given Python file with the test venv's interpreter and return the
/// [`Assert`] for the caller to check the outcome (e.g. success).
pub fn assert_file(&self, file: impl AsRef<Path>) -> Assert {
    std::process::Command::new(venv_to_interpreter(&self.venv))
        // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
        // https://github.com/python/cpython/issues/75953
        .arg("-B")
        .arg(file.as_ref())
        // Run relative to the test's sandbox, not the repository checkout.
        .current_dir(&self.temp_dir)
        .assert()
}
/// Assert a package is installed with the given version.
pub fn assert_installed(&self, package: &'static str, version: &'static str) {
self.assert_command(

View file

@ -9267,3 +9267,35 @@ fn tool_uv_sources() -> Result<()> {
Ok(())
}
/// Check that a dynamic `pyproject.toml` is supported as a `pip compile` input file.
#[test]
fn dynamic_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    // `dependencies` is declared dynamic, so no static metadata can be read from the
    // `pyproject.toml` — compilation must fall back to building the package.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "bird-feeder"
        version = "1.0.0"
        dynamic = ["dependencies"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
    "#})?;

    // A minimal package body so the build backend has something to build.
    let bird_feeder = context.temp_dir.child("bird_feeder/__init__.py");
    bird_feeder.write_str("__all__= []")?;

    uv_snapshot!(context.compile().arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml

    ----- stderr -----
    Resolved 0 packages in [TIME]
    "###);

    Ok(())
}

View file

@ -0,0 +1,351 @@
use std::env;
use std::path::PathBuf;
use crate::common::{get_bin, uv_snapshot, TestContext, EXCLUDE_NEWER};
mod common;
/// A `pip install` command for workspaces.
///
/// The goal of the workspace tests is to resolve local workspace packages correctly. We add some
/// non-workspace dependencies to ensure that transitive non-workspace dependencies are also
/// correctly resolved.
/// A `pip install` command for workspaces.
///
/// The goal of the workspace tests is to resolve local workspace packages correctly. We add some
/// non-workspace dependencies to ensure that transitive non-workspace dependencies are also
/// correctly resolved.
pub fn install_workspace(context: &TestContext) -> std::process::Command {
    let mut command = std::process::Command::new(get_bin());
    // Batch the fixed flags; the caller appends the editable path after `-e`.
    command
        .args(["pip", "install", "--preview", "--cache-dir"])
        .arg(context.cache_dir.path())
        .args(["--exclude-newer", EXCLUDE_NEWER, "-e"])
        .env("VIRTUAL_ENV", context.venv.as_os_str())
        .env("UV_NO_WRAP", "1")
        .current_dir(&context.temp_dir);

    if cfg!(all(windows, debug_assertions)) {
        // TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
        // default windows stack of 1MB
        command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string());
    }

    command
}
/// Path to the `scripts/workspaces` fixture directory, two levels above the crate dir.
fn workspaces_dir() -> PathBuf {
    let crate_dir = env::current_dir().unwrap();
    crate_dir
        .ancestors()
        .nth(2)
        .unwrap()
        .join("scripts")
        .join("workspaces")
}
#[test]
fn test_albatross_in_examples_bird_feeder() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir()
        .join("albatross-in-example")
        .join("examples")
        .join("bird-feeder");

    // First install: builds the editable and installs its transitive
    // (non-workspace) dependencies.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 1 editable in [TIME]
    Resolved 4 packages in [TIME]
    Downloaded 3 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-in-example/examples/bird-feeder)
     + idna==3.6
     + sniffio==1.3.1
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));

    // Second install: the environment is already up to date, so the editable is
    // only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 1 package in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
#[test]
fn test_albatross_in_examples() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir().join("albatross-in-example");

    // First install: builds the root project as an editable plus its one
    // non-workspace dependency.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 1 editable in [TIME]
    Resolved 2 packages in [TIME]
    Downloaded 1 package in [TIME]
    Installed 2 packages in [TIME]
     + albatross==0.1.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-in-example)
     + tqdm==4.66.2
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));

    // Second install: nothing changed, so the editable is only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 1 package in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
#[test]
fn test_albatross_just_project() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir().join("albatross-just-project");

    // First install: a plain project without any workspace table resolves as a
    // single editable plus its non-workspace dependency.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 1 editable in [TIME]
    Resolved 2 packages in [TIME]
    Downloaded 1 package in [TIME]
    Installed 2 packages in [TIME]
     + albatross==0.1.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-just-project)
     + tqdm==4.66.2
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));

    // Second install: nothing changed, so the editable is only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 1 package in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
#[test]
fn test_albatross_project_in_excluded() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir()
        .join("albatross-project-in-excluded")
        .join("excluded")
        .join("bird-feeder");

    // First install: the project lives in an excluded directory, so it resolves
    // standalone — only its non-workspace dependencies are installed.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 1 editable in [TIME]
    Resolved 4 packages in [TIME]
    Downloaded 3 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-project-in-excluded/excluded/bird-feeder)
     + idna==3.6
     + sniffio==1.3.1
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));

    // Second install: nothing changed, so the editable is only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 1 package in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
#[test]
fn test_albatross_root_workspace() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir().join("albatross-root-workspace");

    // First install: the workspace root pulls in the other workspace members
    // (`bird-feeder`, `seeds`) as editables, plus the non-workspace dependencies.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 3 editables in [TIME]
    Resolved 7 packages in [TIME]
    Downloaded 4 packages in [TIME]
    Installed 7 packages in [TIME]
     + albatross==0.1.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace)
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/bird-feeder)
     + idna==3.6
     + seeds==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/seeds)
     + sniffio==1.3.1
     + tqdm==4.66.2
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));

    // Second install: nothing changed, so all three editables are only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 3 packages in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
#[test]
fn test_albatross_root_workspace_bird_feeder() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir()
        .join("albatross-root-workspace")
        .join("packages")
        .join("bird-feeder");

    // First install: installing a single member pulls in its workspace path
    // dependency (`seeds`) as an editable, but not the unrelated root project.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 2 editables in [TIME]
    Resolved 5 packages in [TIME]
    Downloaded 3 packages in [TIME]
    Installed 5 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/bird-feeder)
     + idna==3.6
     + seeds==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/seeds)
     + sniffio==1.3.1
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));

    // Second install: nothing changed, so both editables are only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 2 packages in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
#[test]
fn test_albatross_root_workspace_albatross() {
    let context = TestContext::new("3.12");
    // NOTE(review): this test is named `..._albatross` but targets the `bird-feeder`
    // member directory and snapshots the same resolution as
    // `test_albatross_root_workspace_bird_feeder` — this looks like a copy-paste;
    // confirm whether it should install from the workspace root (`albatross`)
    // instead, with a matching snapshot and check script.
    let current_dir = workspaces_dir()
        .join("albatross-root-workspace")
        .join("packages")
        .join("bird-feeder");

    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 2 editables in [TIME]
    Resolved 5 packages in [TIME]
    Downloaded 3 packages in [TIME]
    Installed 5 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/bird-feeder)
     + idna==3.6
     + seeds==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-root-workspace/packages/seeds)
     + sniffio==1.3.1
    "###
    );

    // NOTE(review): `check_installed_albatross.py` is joined onto the bird-feeder
    // directory here — verify the script actually exists at that path.
    context.assert_file(current_dir.join("check_installed_albatross.py"));

    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 2 packages in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
#[test]
fn test_albatross_virtual_workspace() {
    let context = TestContext::new("3.12");
    let current_dir = workspaces_dir()
        .join("albatross-virtual-workspace")
        .join("packages")
        .join("bird-feeder");

    // First install: a member of a virtual workspace (a root without its own
    // `[project]`) pulls in its workspace path dependency (`seeds`) as an editable.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Built 2 editables in [TIME]
    Resolved 5 packages in [TIME]
    Downloaded 3 packages in [TIME]
    Installed 5 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-virtual-workspace/packages/bird-feeder)
     + idna==3.6
     + seeds==1.0.0 (from file://[WORKSPACE]/scripts/workspaces/albatross-virtual-workspace/packages/seeds)
     + sniffio==1.3.1
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));

    // Second install: nothing changed, so both editables are only audited.
    uv_snapshot!(context.filters(), install_workspace(&context).arg(&current_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited 2 packages in [TIME]
    "###
    );

    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}

View file

@ -1,7 +1,7 @@
[project]
name = "seeds"
version = "1.0.0"
dependencies = ["boltons==24.0.0"]
dependencies = ["idna==3.6"]
[build-system]
requires = ["hatchling"]

View file

@ -1,4 +1,4 @@
import boltons
import idna
def seeds():

View file

@ -1,7 +1,7 @@
[project]
name = "seeds"
version = "1.0.0"
dependencies = ["boltons==24.0.0"]
dependencies = ["idna==3.6"]
[build-system]
requires = ["hatchling"]

View file

@ -1,4 +1,4 @@
import boltons
import idna
def seeds():